var/home/core/zuul-output/0000755000175000017500000000000015066450202014525 5ustar corecorevar/home/core/zuul-output/logs/0000755000175000017500000000000015066466436015511 5ustar corecorevar/home/core/zuul-output/logs/kubelet.log0000644000000000000000005041323615066466427017721 0ustar rootrootSep 29 09:29:26 crc systemd[1]: Starting Kubernetes Kubelet... Sep 29 09:29:26 crc restorecon[4721]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Sep 29 09:29:26 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]:
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 
09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 09:29:27 crc 
restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 09:29:27 crc restorecon[4721]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 29 09:29:28 crc kubenswrapper[4779]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 09:29:28 crc kubenswrapper[4779]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 29 09:29:28 crc kubenswrapper[4779]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 09:29:28 crc kubenswrapper[4779]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 29 09:29:28 crc kubenswrapper[4779]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 29 09:29:28 crc kubenswrapper[4779]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.461884 4779 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467133 4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467165 4779 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467175 4779 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467184 4779 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467192 4779 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467201 4779 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467208 4779 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467217 4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467226 4779 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467237 4779 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467247 4779 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467256 4779 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467264 4779 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467273 4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467280 4779 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467288 4779 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467296 4779 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467315 4779 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467324 4779 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467332 4779 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467340 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467347 4779 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467355 4779 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467362 4779 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467370 4779 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467377 4779 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467386 4779 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467394 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467401 4779 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467409 4779 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467417 4779 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467425 4779 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467432 4779 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467439 4779 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467447 4779 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467455 4779 feature_gate.go:330] 
unrecognized feature gate: NetworkLiveMigration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467467 4779 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467476 4779 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467484 4779 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467493 4779 feature_gate.go:330] unrecognized feature gate: Example Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467501 4779 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467510 4779 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467518 4779 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467526 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467534 4779 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467542 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467550 4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467558 4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467566 4779 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467573 4779 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467581 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467589 4779 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467596 4779 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467604 4779 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467612 4779 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467620 4779 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467627 4779 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467635 4779 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467643 4779 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467650 4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467658 4779 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 09:29:28 crc 
kubenswrapper[4779]: W0929 09:29:28.467666 4779 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467673 4779 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467681 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467691 4779 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467701 4779 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467712 4779 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467722 4779 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467731 4779 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467740 4779 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.467748 4779 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.467883 4779 flags.go:64] FLAG: --address="0.0.0.0" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.467925 4779 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.467944 4779 flags.go:64] FLAG: --anonymous-auth="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.467956 4779 flags.go:64] FLAG: --application-metrics-count-limit="100" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.467967 4779 flags.go:64] FLAG: --authentication-token-webhook="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.467976 4779 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.467987 4779 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.467999 4779 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468010 4779 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468020 4779 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468032 4779 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468042 4779 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468051 4779 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468059 4779 flags.go:64] FLAG: --cgroup-root="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468068 4779 flags.go:64] FLAG: --cgroups-per-qos="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468077 4779 flags.go:64] FLAG: --client-ca-file="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468086 4779 flags.go:64] FLAG: --cloud-config="" Sep 29 09:29:28 crc 
kubenswrapper[4779]: I0929 09:29:28.468095 4779 flags.go:64] FLAG: --cloud-provider="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468103 4779 flags.go:64] FLAG: --cluster-dns="[]" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468114 4779 flags.go:64] FLAG: --cluster-domain="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468122 4779 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468132 4779 flags.go:64] FLAG: --config-dir="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468140 4779 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468149 4779 flags.go:64] FLAG: --container-log-max-files="5" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468160 4779 flags.go:64] FLAG: --container-log-max-size="10Mi" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468169 4779 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468179 4779 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468188 4779 flags.go:64] FLAG: --containerd-namespace="k8s.io" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468196 4779 flags.go:64] FLAG: --contention-profiling="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468205 4779 flags.go:64] FLAG: --cpu-cfs-quota="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468214 4779 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468223 4779 flags.go:64] FLAG: --cpu-manager-policy="none" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468234 4779 flags.go:64] FLAG: --cpu-manager-policy-options="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468254 4779 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468262 4779 flags.go:64] FLAG: --enable-controller-attach-detach="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468271 4779 flags.go:64] FLAG: --enable-debugging-handlers="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468280 4779 flags.go:64] FLAG: --enable-load-reader="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468289 4779 flags.go:64] FLAG: --enable-server="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468298 4779 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468310 4779 flags.go:64] FLAG: --event-burst="100" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468320 4779 flags.go:64] FLAG: --event-qps="50" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468329 4779 flags.go:64] FLAG: --event-storage-age-limit="default=0" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468338 4779 flags.go:64] FLAG: --event-storage-event-limit="default=0" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468349 4779 flags.go:64] FLAG: --eviction-hard="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468359 4779 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468368 4779 flags.go:64] FLAG: --eviction-minimum-reclaim="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468377 4779 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Sep 29 
09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468386 4779 flags.go:64] FLAG: --eviction-soft="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468395 4779 flags.go:64] FLAG: --eviction-soft-grace-period="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468404 4779 flags.go:64] FLAG: --exit-on-lock-contention="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468412 4779 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468421 4779 flags.go:64] FLAG: --experimental-mounter-path="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468430 4779 flags.go:64] FLAG: --fail-cgroupv1="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468439 4779 flags.go:64] FLAG: --fail-swap-on="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468447 4779 flags.go:64] FLAG: --feature-gates="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468457 4779 flags.go:64] FLAG: --file-check-frequency="20s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468466 4779 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468475 4779 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468485 4779 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468494 4779 flags.go:64] FLAG: --healthz-port="10248" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468503 4779 flags.go:64] FLAG: --help="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468513 4779 flags.go:64] FLAG: --hostname-override="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468521 4779 flags.go:64] FLAG: --housekeeping-interval="10s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468531 4779 flags.go:64] FLAG: --http-check-frequency="20s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468540 4779 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468549 4779 flags.go:64] FLAG: --image-credential-provider-config="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468557 4779 flags.go:64] FLAG: --image-gc-high-threshold="85" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468566 4779 flags.go:64] FLAG: --image-gc-low-threshold="80" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468575 4779 flags.go:64] FLAG: --image-service-endpoint="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468584 4779 flags.go:64] FLAG: --kernel-memcg-notification="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468593 4779 flags.go:64] FLAG: --kube-api-burst="100" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468601 4779 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468611 4779 flags.go:64] FLAG: --kube-api-qps="50" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468620 4779 flags.go:64] FLAG: --kube-reserved="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468630 4779 flags.go:64] FLAG: --kube-reserved-cgroup="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468638 4779 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468648 4779 flags.go:64] FLAG: --kubelet-cgroups="" Sep 29 09:29:28 crc kubenswrapper[4779]: 
I0929 09:29:28.468656 4779 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468665 4779 flags.go:64] FLAG: --lock-file="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468675 4779 flags.go:64] FLAG: --log-cadvisor-usage="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468684 4779 flags.go:64] FLAG: --log-flush-frequency="5s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468693 4779 flags.go:64] FLAG: --log-json-info-buffer-size="0" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468705 4779 flags.go:64] FLAG: --log-json-split-stream="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468714 4779 flags.go:64] FLAG: --log-text-info-buffer-size="0" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468723 4779 flags.go:64] FLAG: --log-text-split-stream="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468732 4779 flags.go:64] FLAG: --logging-format="text" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468741 4779 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468750 4779 flags.go:64] FLAG: --make-iptables-util-chains="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468758 4779 flags.go:64] FLAG: --manifest-url="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468767 4779 flags.go:64] FLAG: --manifest-url-header="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468778 4779 flags.go:64] FLAG: --max-housekeeping-interval="15s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468787 4779 flags.go:64] FLAG: --max-open-files="1000000" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468798 4779 flags.go:64] FLAG: --max-pods="110" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468814 4779 flags.go:64] FLAG: --maximum-dead-containers="-1" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468824 4779 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468833 4779 flags.go:64] FLAG: --memory-manager-policy="None" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468842 4779 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468851 4779 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468860 4779 flags.go:64] FLAG: --node-ip="192.168.126.11" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468869 4779 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468889 4779 flags.go:64] FLAG: --node-status-max-images="50" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468898 4779 flags.go:64] FLAG: --node-status-update-frequency="10s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468940 4779 flags.go:64] FLAG: --oom-score-adj="-999" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468948 4779 flags.go:64] FLAG: --pod-cidr="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468957 4779 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.468994 4779 flags.go:64] FLAG: 
--pod-manifest-path="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469004 4779 flags.go:64] FLAG: --pod-max-pids="-1" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469013 4779 flags.go:64] FLAG: --pods-per-core="0" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469022 4779 flags.go:64] FLAG: --port="10250" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469033 4779 flags.go:64] FLAG: --protect-kernel-defaults="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469042 4779 flags.go:64] FLAG: --provider-id="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469052 4779 flags.go:64] FLAG: --qos-reserved="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469061 4779 flags.go:64] FLAG: --read-only-port="10255" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469070 4779 flags.go:64] FLAG: --register-node="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469079 4779 flags.go:64] FLAG: --register-schedulable="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469089 4779 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469103 4779 flags.go:64] FLAG: --registry-burst="10" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469112 4779 flags.go:64] FLAG: --registry-qps="5" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469121 4779 flags.go:64] FLAG: --reserved-cpus="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469129 4779 flags.go:64] FLAG: --reserved-memory="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469141 4779 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469150 4779 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469158 4779 flags.go:64] FLAG: --rotate-certificates="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469168 4779 flags.go:64] FLAG: --rotate-server-certificates="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469177 4779 flags.go:64] FLAG: --runonce="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469186 4779 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469199 4779 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469208 4779 flags.go:64] FLAG: --seccomp-default="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469217 4779 flags.go:64] FLAG: --serialize-image-pulls="true" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469226 4779 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469236 4779 flags.go:64] FLAG: --storage-driver-db="cadvisor" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469245 4779 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469254 4779 flags.go:64] FLAG: --storage-driver-password="root" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469263 4779 flags.go:64] FLAG: --storage-driver-secure="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469272 4779 flags.go:64] FLAG: --storage-driver-table="stats" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469281 4779 flags.go:64] FLAG: --storage-driver-user="root" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 
09:29:28.469290 4779 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469299 4779 flags.go:64] FLAG: --sync-frequency="1m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469308 4779 flags.go:64] FLAG: --system-cgroups="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469316 4779 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469331 4779 flags.go:64] FLAG: --system-reserved-cgroup="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469340 4779 flags.go:64] FLAG: --tls-cert-file="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469348 4779 flags.go:64] FLAG: --tls-cipher-suites="[]" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469358 4779 flags.go:64] FLAG: --tls-min-version="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469367 4779 flags.go:64] FLAG: --tls-private-key-file="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469375 4779 flags.go:64] FLAG: --topology-manager-policy="none" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469385 4779 flags.go:64] FLAG: --topology-manager-policy-options="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469393 4779 flags.go:64] FLAG: --topology-manager-scope="container" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469402 4779 flags.go:64] FLAG: --v="2" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469413 4779 flags.go:64] FLAG: --version="false" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469423 4779 flags.go:64] FLAG: --vmodule="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469435 4779 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.469448 4779 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469654 4779 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469665 4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469674 4779 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469683 4779 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469692 4779 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469706 4779 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469717 4779 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469726 4779 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469735 4779 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469745 4779 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469755 4779 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469764 4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469774 4779 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469784 4779 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469792 4779 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469802 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469811 4779 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469820 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469828 4779 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469837 4779 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469847 4779 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469856 4779 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469864 4779 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469872 4779 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469880 4779 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469887 4779 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469895 4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469928 4779 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469936 4779 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469945 4779 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469953 4779 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469964 4779 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469972 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469980 4779 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.469988 4779 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 09:29:28 crc 
kubenswrapper[4779]: W0929 09:29:28.469997 4779 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470004 4779 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470015 4779 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470022 4779 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470030 4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470037 4779 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470045 4779 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470053 4779 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470060 4779 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470068 4779 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470075 4779 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470083 4779 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470090 4779 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470098 4779 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470106 4779 feature_gate.go:330] unrecognized feature gate: Example Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470114 4779 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470121 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470129 4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470136 4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470144 4779 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470151 4779 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470159 4779 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470167 4779 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470174 4779 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470182 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470190 4779 feature_gate.go:330] unrecognized feature gate: 
MultiArchInstallAzure Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470198 4779 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470205 4779 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470216 4779 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470224 4779 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470231 4779 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470239 4779 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470246 4779 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470254 4779 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470266 4779 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.470275 4779 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.470300 4779 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.483795 4779 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.483837 4779 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.483975 4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.483988 4779 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.483997 4779 feature_gate.go:330] unrecognized feature gate: Example Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484006 4779 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484014 4779 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484022 4779 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484030 4779 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484038 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484051 4779 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484062 4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484071 4779 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484079 4779 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484088 4779 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484096 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484107 4779 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484118 4779 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484127 4779 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484135 4779 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484145 4779 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484155 4779 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484164 4779 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484172 4779 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484180 4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484187 4779 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484195 4779 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484203 4779 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484211 4779 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484219 4779 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484226 4779 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484234 4779 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484241 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484249 4779 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484258 4779 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484266 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 
09:29:28.484275 4779 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484283 4779 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484291 4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484299 4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484306 4779 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484314 4779 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484322 4779 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484331 4779 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484338 4779 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484346 4779 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484354 4779 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484362 4779 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484369 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484376 4779 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484384 4779 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484392 4779 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484400 4779 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484408 4779 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484415 4779 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484423 4779 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484431 4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484439 4779 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484447 4779 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484455 4779 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484463 4779 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484471 4779 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 
09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484479 4779 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484486 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484494 4779 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484501 4779 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484510 4779 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484517 4779 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484525 4779 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484533 4779 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484540 4779 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484550 4779 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484560 4779 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.484573 4779 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484783 4779 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484798 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484807 4779 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484816 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484825 4779 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484834 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484843 4779 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484851 4779 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484858 4779 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484866 4779 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 09:29:28 crc 
kubenswrapper[4779]: W0929 09:29:28.484874 4779 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484882 4779 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484889 4779 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484897 4779 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484928 4779 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484936 4779 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484944 4779 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484951 4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484987 4779 feature_gate.go:330] unrecognized feature gate: Example Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.484997 4779 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485007 4779 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485015 4779 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485024 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485032 4779 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485041 4779 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485048 4779 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485056 4779 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485063 4779 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485071 4779 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485079 4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485087 4779 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485094 4779 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485102 4779 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485113 4779 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485125 4779 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485135 4779 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485143 4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485152 4779 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485159 4779 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485168 4779 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485176 4779 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485184 4779 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485191 4779 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485199 4779 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485206 4779 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485214 4779 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485224 4779 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485233 4779 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485242 4779 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485250 4779 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485259 4779 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485266 4779 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485276 4779 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485285 4779 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485293 4779 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485301 4779 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485309 4779 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485317 4779 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485324 4779 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485332 4779 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485340 4779 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485348 4779 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485355 4779 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485366 4779 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485374 4779 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485387 4779 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485397 4779 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485405 4779 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485413 4779 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485421 4779 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.485430 4779 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.485442 4779 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.485637 4779 server.go:940] "Client rotation is on, will bootstrap in background" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.491240 4779 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.491368 4779 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.493179 4779 server.go:997] "Starting client certificate rotation"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.493228 4779 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.493484 4779 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-18 03:44:42.981284327 +0000 UTC
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.493634 4779 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2658h15m14.487657197s for next certificate rotation
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.527113 4779 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.535986 4779 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.553800 4779 log.go:25] "Validated CRI v1 runtime API"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.595322 4779 log.go:25] "Validated CRI v1 image API"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.597372 4779 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.604008 4779 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-29-09-24-16-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.604055 4779 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.627199 4779 manager.go:217] Machine: {Timestamp:2025-09-29 09:29:28.622751743 +0000 UTC m=+0.604075697 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:6af97324-aa9b-4cb6-ab41-66056c52c25a BootID:25d3a4e6-deea-47ab-ac6b-f80ccadc03c7 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:0e:6e:6a Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:0e:6e:6a Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:5e:6b:c3 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:a8:b5:9c Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:65:38:ca Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:32:4b:c4 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:41:5b:05 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:02:9a:d6:1a:3a:73 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:fa:54:a4:62:91:25 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.627512 4779 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.627745 4779 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.628141 4779 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.628374 4779 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.628416 4779 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.628668 4779 topology_manager.go:138] "Creating topology manager with none policy"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.628680 4779 container_manager_linux.go:303] "Creating device plugin manager"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.629416 4779 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.629464 4779 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.630158 4779 state_mem.go:36] "Initialized new in-memory state store"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.630265 4779 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.635050 4779 kubelet.go:418] "Attempting to sync node with API server"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.635073 4779 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.635100 4779 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.635114 4779 kubelet.go:324] "Adding apiserver pod source"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.635125 4779 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.640611 4779 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.641956 4779 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.644078 4779 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645840 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645862 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645871 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645911 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645924 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645930 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645937 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645948 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645955 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645962 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645990 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.645996 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.646006 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.646017 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused
Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.646146 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError"
Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.646172 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.647224 4779 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.647701 4779 server.go:1280] "Started kubelet"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.648496 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.649172 4779 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.649174 4779 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 29 09:29:28 crc systemd[1]: Started Kubernetes Kubelet.
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.650651 4779 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.650862 4779 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.650940 4779 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.651133 4779 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 12:08:13.908961605 +0000 UTC
Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.651216 4779 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.651287 4779 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1538h38m45.257741516s for next certificate rotation
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.651362 4779 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.651372 4779 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.651411 4779 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.658241 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused
Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.658560 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.658826 4779 factory.go:55] Registering systemd factory
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.658869 4779 factory.go:221] Registration of the systemd container factory successfully
Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.659117 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="200ms"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.659238 4779 server.go:460] "Adding debug handlers to kubelet server"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.660251 4779 factory.go:153] Registering CRI-O factory
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.660288 4779 factory.go:221] Registration of the crio container factory successfully
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.660367 4779 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.660391 4779 factory.go:103] Registering Raw factory
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.660407 4779 manager.go:1196] Started watching for new ooms in manager
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.664089 4779 manager.go:319] Starting recovery of all containers
Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.664675 4779 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.30:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869b6d815135439 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 09:29:28.647668793 +0000 UTC m=+0.628992697,LastTimestamp:2025-09-29 09:29:28.647668793 +0000 UTC m=+0.628992697,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672031 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672110 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672133 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672154 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672172 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672193 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672211 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672229 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672250 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672268 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672286 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672305 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672323 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672349 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672368 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672386 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672407 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672425 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672445 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672464 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672483 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672502 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672519 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672537 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672580 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672620 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672644 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672664 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672682 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672701 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672719 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672736 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672756 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672774 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672795 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672815 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672835 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.672895 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673026 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673045 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673063 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673083 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673102 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673119 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673138 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673156 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673178 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673196 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673220 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673239 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673257 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673277 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673302 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673322 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673340 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673363 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673381 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673399 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673417 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673458 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673477 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673495 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673514 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673531 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673552 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673570 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673589 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673606 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673625 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673642 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673661 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673679 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673697 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673714 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673735 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673755 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673775 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673798 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673821 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673845 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673872 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673895 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673955 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.673983 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674010 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674035 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674059 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674076 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674094 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674113 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674133 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674150 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674167 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674185 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674202 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674220 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674240 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674313 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674347 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674363 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674376 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674391 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674405 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674420 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674444 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674463 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674478 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674502 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674519 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674539 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674553 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674586 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674599 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674611 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674624 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674638 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674650 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674663 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674675 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674688 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674699 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674710 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674721 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674733 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674745 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674756 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674767 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674780 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674791 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674804 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674815 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674827 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674839 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674851 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674864 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674876 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674890 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674915 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674927 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674938 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674950 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674964 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.674995 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.675011 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679303 4779 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679369 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679392 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679408 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679427 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679445 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679463 
4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679479 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679498 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679516 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679534 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679552 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679573 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679590 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679606 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679621 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679634 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679648 4779 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679661 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679676 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679690 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679703 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679718 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679732 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679746 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679763 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679778 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679795 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679809 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679827 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679841 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679854 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679868 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679885 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679919 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679936 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679953 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679967 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679983 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.679999 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680016 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680031 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680048 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680075 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680091 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680108 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680121 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680137 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680151 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680167 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680181 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680196 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680210 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680224 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680241 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680256 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680275 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680292 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680310 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680325 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680340 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680355 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680369 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680384 4779 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680396 4779 reconstruct.go:97] "Volume reconstruction finished" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.680407 4779 reconciler.go:26] "Reconciler: start to sync state" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.686588 4779 manager.go:324] Recovery completed Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.702747 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.705709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.705765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.705779 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.707154 4779 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.707175 4779 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.707196 4779 state_mem.go:36] "Initialized new in-memory state store" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.711149 4779 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.712855 4779 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.712911 4779 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.712936 4779 kubelet.go:2335] "Starting kubelet main sync loop" Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.712980 4779 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 29 09:29:28 crc kubenswrapper[4779]: W0929 09:29:28.713765 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.713830 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.731803 4779 policy_none.go:49] "None policy: Start" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.733077 4779 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.733107 4779 state_mem.go:35] "Initializing new in-memory state store" Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.751474 4779 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.800135 4779 manager.go:334] "Starting Device Plugin manager" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.800261 4779 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.800274 4779 server.go:79] "Starting device plugin registration server" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.800667 4779 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.800688 4779 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.800891 4779 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.800987 4779 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.800995 4779 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.808060 4779 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.813250 4779 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 29 09:29:28 crc kubenswrapper[4779]: 
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.813361 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.814422 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.814465 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.814480 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.814662 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.815147 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.815241 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.815424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.815441 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.815452 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.815567 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.815842 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.815880 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.816535 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.816614 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.816631 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.816986 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.817095 4779 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.817126 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.817147 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.817155 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.817168 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.817289 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.817310 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.817319 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818419 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818441 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818450 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818465 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818522 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818748 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818859 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.818886 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.819557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.819587 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.819597 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.820317 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.820355 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.820368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.820561 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.820593 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.822204 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.822236 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.822247 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.860513 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="400ms" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881662 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881691 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881727 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881743 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881758 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881811 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881856 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881889 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881963 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.881994 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.882099 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.882124 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod 
\"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.882254 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.882332 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.882368 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.901842 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.903437 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.903488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.903510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.903552 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 09:29:28 crc kubenswrapper[4779]: E0929 09:29:28.904247 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.983856 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.983950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.983999 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984045 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984085 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984133 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984150 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984168 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984176 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984261 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984255 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984320 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984360 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984250 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984398 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984195 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984467 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984560 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984699 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984723 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984777 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984802 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984834 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984865 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.984883 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.985002 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.985024 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.985074 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.985077 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:28 crc kubenswrapper[4779]: I0929 09:29:28.985128 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.104765 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.106208 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.106253 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.106272 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.106304 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 09:29:29 crc kubenswrapper[4779]: E0929 09:29:29.106787 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: 
connection refused" node="crc" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.147619 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.156364 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.182988 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.193308 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.200337 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.200652 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c351fc41980e63ee096805df50bad6290c4fa16bbf9224fd280affc8278f2095 WatchSource:0}: Error finding container c351fc41980e63ee096805df50bad6290c4fa16bbf9224fd280affc8278f2095: Status 404 returned error can't find the container with id c351fc41980e63ee096805df50bad6290c4fa16bbf9224fd280affc8278f2095 Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.201011 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-ba399c5047d62864c035b348ae689486b24f79548e1c781077127dd5326987cd WatchSource:0}: Error finding container ba399c5047d62864c035b348ae689486b24f79548e1c781077127dd5326987cd: Status 404 returned error can't find the container with id ba399c5047d62864c035b348ae689486b24f79548e1c781077127dd5326987cd Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.209304 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-9f03d65bc71d5d3a047af09c4ee8406803c175c9a1eda3a065f062ffa686ce00 WatchSource:0}: Error finding container 9f03d65bc71d5d3a047af09c4ee8406803c175c9a1eda3a065f062ffa686ce00: Status 404 returned error can't find the container with id 9f03d65bc71d5d3a047af09c4ee8406803c175c9a1eda3a065f062ffa686ce00 Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.210230 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-5767b2f81bf6beddabb7b62d932455966b9a76e9e14a3e87f899a99eefe8bd12 WatchSource:0}: Error finding container 5767b2f81bf6beddabb7b62d932455966b9a76e9e14a3e87f899a99eefe8bd12: Status 404 returned error can't find the container with id 5767b2f81bf6beddabb7b62d932455966b9a76e9e14a3e87f899a99eefe8bd12 Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.215689 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-4d5d1cdc06acba388d9a258ea7f47e71e7d0f5609ba4e71b656a4b4de107a34b WatchSource:0}: Error finding container 4d5d1cdc06acba388d9a258ea7f47e71e7d0f5609ba4e71b656a4b4de107a34b: 
Status 404 returned error can't find the container with id 4d5d1cdc06acba388d9a258ea7f47e71e7d0f5609ba4e71b656a4b4de107a34b Sep 29 09:29:29 crc kubenswrapper[4779]: E0929 09:29:29.262121 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="800ms" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.506926 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.508543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.508606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.508625 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.508670 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 09:29:29 crc kubenswrapper[4779]: E0929 09:29:29.509374 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.567531 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:29 crc kubenswrapper[4779]: E0929 09:29:29.567614 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.607293 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:29 crc kubenswrapper[4779]: E0929 09:29:29.607408 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.649526 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.717966 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9f03d65bc71d5d3a047af09c4ee8406803c175c9a1eda3a065f062ffa686ce00"} Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.719057 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ba399c5047d62864c035b348ae689486b24f79548e1c781077127dd5326987cd"} Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.720364 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c351fc41980e63ee096805df50bad6290c4fa16bbf9224fd280affc8278f2095"} Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.721514 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4d5d1cdc06acba388d9a258ea7f47e71e7d0f5609ba4e71b656a4b4de107a34b"} Sep 29 09:29:29 crc kubenswrapper[4779]: I0929 09:29:29.725435 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5767b2f81bf6beddabb7b62d932455966b9a76e9e14a3e87f899a99eefe8bd12"} Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.851580 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:29 crc kubenswrapper[4779]: E0929 09:29:29.851656 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Sep 29 09:29:29 crc kubenswrapper[4779]: W0929 09:29:29.999512 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:29 crc kubenswrapper[4779]: E0929 09:29:29.999587 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Sep 29 09:29:30 crc kubenswrapper[4779]: E0929 09:29:30.063683 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="1.6s" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.309862 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.311306 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.311347 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.311362 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.311392 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 09:29:30 crc kubenswrapper[4779]: E0929 09:29:30.312006 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.649682 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.731832 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233"} Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.731921 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6"} Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.731939 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a"} Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.731948 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.731952 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16"} Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.733595 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.733606 4779 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621" exitCode=0 Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.733646 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621"} Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.733629 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.733728 4779 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.733761 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.734983 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.735028 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.735046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.735608 4779 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e" exitCode=0 Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.735680 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e"} Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.735785 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.737536 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.737569 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.737579 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.739297 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.739849 4779 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="0c4b0ce933d2a7a4ca883840e6bba48e35e7ae32e487a991ad0571a73fd08ade" exitCode=0 Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.739935 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"0c4b0ce933d2a7a4ca883840e6bba48e35e7ae32e487a991ad0571a73fd08ade"} Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.739966 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.740450 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.740495 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.740514 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.741288 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.741341 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.741359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.742416 4779 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e" exitCode=0 Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.742446 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e"} Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.742527 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.743531 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.743630 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:30 crc kubenswrapper[4779]: I0929 09:29:30.743649 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:31 crc kubenswrapper[4779]: W0929 09:29:31.472022 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:31 crc kubenswrapper[4779]: E0929 09:29:31.472106 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.650081 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:31 crc kubenswrapper[4779]: E0929 09:29:31.664618 4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="3.2s" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.748274 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.748374 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.748389 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.748333 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.749328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.749358 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.749369 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.750567 4779 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648" exitCode=0 Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.750648 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.750650 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.751638 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.751664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.751673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.754852 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.754929 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.754928 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.755049 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 
09:29:31.755075 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.755090 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.755664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.755695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.755707 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.756342 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"000b9907e5cd9992d56b78dcb66e94850ffd21fe78fd7232d38766489eddc815"} Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.756387 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.756393 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.760160 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.760267 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.760337 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.760582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.760626 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.760647 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.912650 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.914135 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.914170 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.914184 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:31 crc kubenswrapper[4779]: I0929 09:29:31.914219 4779 
kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 09:29:31 crc kubenswrapper[4779]: E0929 09:29:31.914671 4779 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Sep 29 09:29:31 crc kubenswrapper[4779]: W0929 09:29:31.969404 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Sep 29 09:29:31 crc kubenswrapper[4779]: E0929 09:29:31.969472 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.036944 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.075275 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.761861 4779 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e" exitCode=0 Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.761944 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e"} Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.762047 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.762071 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.762107 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.762166 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.763091 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.763152 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.763159 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764086 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764138 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:32 crc 
kubenswrapper[4779]: I0929 09:29:32.764159 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764138 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764219 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764245 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764410 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764508 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764418 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764646 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764670 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:32 crc kubenswrapper[4779]: I0929 09:29:32.764692 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.770166 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370"} Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.770245 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b"} Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.770252 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.770267 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604"} Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.770292 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e"} Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.770252 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.770688 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.771849 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.771881 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.771893 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.771924 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.771970 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.771987 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.772280 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.772338 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:33 crc kubenswrapper[4779]: I0929 09:29:33.772359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.147606 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.489782 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.779555 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.780197 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.780522 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872"} Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.780663 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781375 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781404 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 
29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781418 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781814 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781862 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781884 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781828 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:34 crc kubenswrapper[4779]: I0929 09:29:34.781992 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.063958 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.115446 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.116790 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.116853 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.116872 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.116965 4779 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.782972 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.783025 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.784448 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.784479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.784487 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.784488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.784535 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:35 crc kubenswrapper[4779]: I0929 09:29:35.784563 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 29 09:29:37 crc kubenswrapper[4779]: I0929 09:29:37.490871 4779 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 09:29:37 crc kubenswrapper[4779]: I0929 09:29:37.491108 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.285092 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.285611 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.288266 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.288357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.288378 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.294940 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.316474 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.316746 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.318461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.318527 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.318547 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.793278 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.794748 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.794799 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:38 crc kubenswrapper[4779]: I0929 09:29:38.794817 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:38 crc kubenswrapper[4779]: E0929 09:29:38.808277 4779 eviction_manager.go:285] 
"Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.024622 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.024860 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.026226 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.026264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.026274 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.041648 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.041759 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.042812 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.042840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.042849 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.650547 4779 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Sep 29 09:29:42 crc kubenswrapper[4779]: W0929 09:29:42.683859 4779 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.683993 4779 trace.go:236] Trace[448196714]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 09:29:32.682) (total time: 10001ms): Sep 29 09:29:42 crc kubenswrapper[4779]: Trace[448196714]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (09:29:42.683) Sep 29 09:29:42 crc kubenswrapper[4779]: Trace[448196714]: [10.001638324s] [10.001638324s] END Sep 29 09:29:42 crc kubenswrapper[4779]: E0929 09:29:42.684040 4779 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.774715 4779 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe 
status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.774843 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.792057 4779 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.792147 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.804837 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.806559 4779 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7" exitCode=255 Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.806595 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7"} Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.806723 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.807485 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.807523 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.807536 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:42 crc kubenswrapper[4779]: I0929 09:29:42.808213 4779 scope.go:117] "RemoveContainer" containerID="6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7" Sep 29 09:29:43 crc kubenswrapper[4779]: I0929 09:29:43.453567 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:43 crc kubenswrapper[4779]: I0929 09:29:43.811511 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 09:29:43 crc kubenswrapper[4779]: I0929 09:29:43.813486 4779 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0"} Sep 29 09:29:43 crc kubenswrapper[4779]: I0929 09:29:43.813652 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:43 crc kubenswrapper[4779]: I0929 09:29:43.814626 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:43 crc kubenswrapper[4779]: I0929 09:29:43.814663 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:43 crc kubenswrapper[4779]: I0929 09:29:43.814674 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:44 crc kubenswrapper[4779]: I0929 09:29:44.816370 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:44 crc kubenswrapper[4779]: I0929 09:29:44.816464 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:44 crc kubenswrapper[4779]: I0929 09:29:44.817387 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:44 crc kubenswrapper[4779]: I0929 09:29:44.817430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:44 crc kubenswrapper[4779]: I0929 09:29:44.817449 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:45 crc kubenswrapper[4779]: I0929 09:29:45.071739 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:45 crc kubenswrapper[4779]: I0929 09:29:45.818895 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:45 crc kubenswrapper[4779]: I0929 09:29:45.820178 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:45 crc kubenswrapper[4779]: I0929 09:29:45.820228 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:45 crc kubenswrapper[4779]: I0929 09:29:45.820243 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:45 crc kubenswrapper[4779]: I0929 09:29:45.825655 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:46 crc kubenswrapper[4779]: I0929 09:29:46.820670 4779 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 09:29:46 crc kubenswrapper[4779]: I0929 09:29:46.821387 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:46 crc kubenswrapper[4779]: I0929 09:29:46.821414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:46 crc kubenswrapper[4779]: I0929 09:29:46.821421 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 
09:29:47.490187    4779 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.490251    4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 09:29:47 crc kubenswrapper[4779]: E0929 09:29:47.775762    4779 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.806813    4779 trace.go:236] Trace[1685935282]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 09:29:35.862) (total time: 11944ms): Sep 29 09:29:47 crc kubenswrapper[4779]: Trace[1685935282]: ---"Objects listed" error:<nil> 11944ms (09:29:47.806) Sep 29 09:29:47 crc kubenswrapper[4779]: Trace[1685935282]: [11.944147204s] [11.944147204s] END Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.806843    4779 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.809372    4779 trace.go:236] Trace[2112222609]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 09:29:36.224) (total time: 11585ms): Sep 29 09:29:47 crc kubenswrapper[4779]: Trace[2112222609]: ---"Objects listed" error:<nil> 11585ms (09:29:47.809) Sep 29 09:29:47 crc kubenswrapper[4779]: Trace[2112222609]: [11.58507741s] [11.58507741s] END Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.809391    4779 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.811307    4779 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.813630    4779 trace.go:236] Trace[1175641248]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 09:29:33.090) (total time: 14722ms): Sep 29 09:29:47 crc kubenswrapper[4779]: Trace[1175641248]: ---"Objects listed" error:<nil> 14722ms (09:29:47.813) Sep 29 09:29:47 crc kubenswrapper[4779]: Trace[1175641248]: [14.722633115s] [14.722633115s] END Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.813656    4779 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.813848    4779 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.814134    4779 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.815487    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.815531 
4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.815543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.815569 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.815581 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:47Z","lastTransitionTime":"2025-09-29T09:29:47Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:47 crc kubenswrapper[4779]: E0929 09:29:47.829989 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.833254 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.833301 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 
09:29:47.833314    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.833340    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.833354    4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:47Z","lastTransitionTime":"2025-09-29T09:29:47Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.844952    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.844990    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.845001    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.845022    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.845035    4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:47Z","lastTransitionTime":"2025-09-29T09:29:47Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.856734    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.856796    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.856811    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.856830    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.856840    4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:47Z","lastTransitionTime":"2025-09-29T09:29:47Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.869224    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.869254    4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 
09:29:47.869262 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.869282 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.869291 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:47Z","lastTransitionTime":"2025-09-29T09:29:47Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:47 crc kubenswrapper[4779]: E0929 09:29:47.908679 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:47Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:47 crc kubenswrapper[4779]: E0929 09:29:47.908796 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.910195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 
09:29:47.910242 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.910254 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.910275 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:47 crc kubenswrapper[4779]: I0929 09:29:47.910285 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:47Z","lastTransitionTime":"2025-09-29T09:29:47Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.013848 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.013892 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.013922 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.013942 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.013952 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.116063 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.116111 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.116124 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.116144 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.116196 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.218473 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.218509 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.218518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.218547 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.218556 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.320542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.320586 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.320598 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.320621 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.320636 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.422651 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.422695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.422705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.422724 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.422733 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.525008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.525040 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.525058 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.525078 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.525087 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.527501 4779 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.626832 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.626871 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.626879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.626897 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.626927 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.648061 4779 apiserver.go:52] "Watching apiserver" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.651047 4779 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.651889 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-machine-config-operator/machine-config-daemon-5lnlv","openshift-multus/multus-f2tkr","openshift-network-diagnostics/network-check-target-xd92c","openshift-dns/node-resolver-r5584","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-multus/multus-additional-cni-plugins-twvvx"] Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.652333 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.652550 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.652620 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.652726 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.653247 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.653405 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.653545 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.653550 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.653674 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.654162 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.654200 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-r5584" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.654612 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.654865 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.657663 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.657894 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.657932 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.658142 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.658260 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.658499 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.660614 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.660658 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.660893 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.660985 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.661019 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.661025 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.661076 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.661076 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.661178 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.661240 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.662107 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.662651 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.663467 4779 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"kube-root-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.663702 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.664722 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.665611 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.666689 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.668389 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.676682 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.687683 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.705460 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.718080 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.718153 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.718193 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.718573 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.719077 4779 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.724375 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.730218 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.730260 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.730270 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.730287 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.730298 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.732666 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.741514 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.751435 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.752353 4779 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.762587 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.773492 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.782123 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.789449 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.795545 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.802498 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.810550 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819038 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819155 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819190 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819209 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819223 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819239 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819258 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819276 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 29 09:29:48 crc 
kubenswrapper[4779]: I0929 09:29:48.819292 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819310 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819325 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819343 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819372 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819402 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819433 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819454 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819473 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819488 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819503 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819522 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819558 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819574 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819594 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819616 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: 
\"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819618 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819634 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819658 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819680 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819702 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819736 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819753 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819768 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819784 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819799 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819815 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819827 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819831 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819870 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819889 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819930 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819948 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819969 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.819986 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820000 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820018 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820034 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820050 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820065 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820112 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820101 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820291 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820501 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820525 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820707 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820826 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821044 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821089 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821122 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821198 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821193 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821284 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821455 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821466 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821372 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821518 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821577 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821644 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821668 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821729 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821834 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.820119 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821868 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821884 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822177 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822265 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822376 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822320 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822496 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822602 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822598 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822643 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822644 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822774 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822845 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.822855 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.821884 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823010 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823030 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823047 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823063 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823088 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823106 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823121 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823145 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823160 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823177 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823192 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823209 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823226 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823271 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823288 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823304 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823319 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823334 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823350 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823366 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823386 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823404 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823418 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823433 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823448 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823464 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823481 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823496 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823511 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823527 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823543 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823558 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823573 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823592 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823616 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823631 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823650 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823675 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823691 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823706 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823720 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823782 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823815 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823831 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823846 
4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823863 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823879 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823895 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823926 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823941 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823957 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823972 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.823987 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824004 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: 
I0929 09:29:48.824019 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824035 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824051 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824066 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824080 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824096 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824109 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824128 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824145 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824160 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 
09:29:48.824175 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824191 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824209 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824225 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824240 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824256 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824271 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824286 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824302 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824317 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 09:29:48 
crc kubenswrapper[4779]: I0929 09:29:48.824332 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824348 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824364 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824382 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824398 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824413 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824431 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824447 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824462 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824479 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod 
\"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824495 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824510 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824525 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824541 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824556 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824571 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824591 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824612 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824634 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824653 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824669 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824685 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824700 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824716 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824732 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824748 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824762 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824778 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824796 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824814 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824831 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824846 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824863 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824878 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824894 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824939 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824955 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824971 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.824987 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825003 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825019 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825035 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825055 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825077 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825101 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825127 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825150 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825173 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825197 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825219 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825240 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825265 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825289 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825310 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825333 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825360 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825383 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825406 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825427 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825451 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825492 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825519 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825544 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825565 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825586 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825609 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825630 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825653 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825674 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825694 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825729 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825749 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825778 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825801 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825822 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825878 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825930 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-rootfs\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825957 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-cni-multus\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.825988 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826014 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-cni-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826036 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-os-release\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826051 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826070 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826123 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826142 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvwtv\" (UniqueName: \"kubernetes.io/projected/564bff56-93cb-42ac-bd34-bbe97f99f411-kube-api-access-nvwtv\") pod \"node-resolver-r5584\" (UID: \"564bff56-93cb-42ac-bd34-bbe97f99f411\") " pod="openshift-dns/node-resolver-r5584" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826168 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826193 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-cnibin\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826215 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr62p\" (UniqueName: \"kubernetes.io/projected/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-kube-api-access-zr62p\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826232 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826251 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826267 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-system-cni-dir\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826282 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cnibin\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826298 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ndnj\" (UniqueName: \"kubernetes.io/projected/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-kube-api-access-7ndnj\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826316 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6b0e23f7-a478-48e2-a745-193a90e87553-multus-daemon-config\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826356 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/564bff56-93cb-42ac-bd34-bbe97f99f411-hosts-file\") pod \"node-resolver-r5584\" (UID: \"564bff56-93cb-42ac-bd34-bbe97f99f411\") " pod="openshift-dns/node-resolver-r5584" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-cni-bin\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826396 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826412 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-mcd-auth-proxy-config\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826429 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-k8s-cni-cncf-io\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826446 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-conf-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826460 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cni-binary-copy\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826476 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6b0e23f7-a478-48e2-a745-193a90e87553-cni-binary-copy\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826492 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-kubelet\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826507 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-etc-kubernetes\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826533 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-socket-dir-parent\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826550 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5rx8\" (UniqueName: \"kubernetes.io/projected/6b0e23f7-a478-48e2-a745-193a90e87553-kube-api-access-x5rx8\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826566 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-system-cni-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826587 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826613 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-os-release\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826629 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826646 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-proxy-tls\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826685 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826709 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-netns\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " 
pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826729 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-hostroot\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826746 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-multus-certs\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826764 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826782 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826831 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826842 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826853 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826863 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826874 4779 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826884 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826894 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826920 4779 
reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826934 4779 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826946 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826959 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826973 4779 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.826987 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827006 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827016 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827026 4779 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827036 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827045 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827055 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827064 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 
09:29:48.827075 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827086 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827097 4779 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827106 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827117 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827127 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827137 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827147 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827160 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827170 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827179 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827189 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827198 4779 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath 
\"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827208 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827217 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827228 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827238 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827248 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.827258 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.829759 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.829932 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830107 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830185 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830556 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830703 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830761 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830771 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830851 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830949 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.830973 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.831248 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.831354 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.831431 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.831514 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.831580 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.831602 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:49.331532981 +0000 UTC m=+21.312856885 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.831618 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.831737 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.831832 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.831893 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:49.331885821 +0000 UTC m=+21.313209715 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.832006 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.831974 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.832189 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.832333 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.831519 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.832458 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:29:49.332434408 +0000 UTC m=+21.313758412 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.832080 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.832926 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.833205 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.833336 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.833520 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.833681 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics".
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.833782 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.834234 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.834685 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.834796 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.835010 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.835359 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.835251 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.835410 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.835667 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.835674 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.836337 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.836355 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.836505 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.836696 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.836637 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.836845 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.836974 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.837244 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.837358 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.837455 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.837830 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.838213 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.838805 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.838832 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.839070 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.839355 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.839545 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.840192 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.840466 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.840535 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.841243 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.842104 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.842272 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.842445 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.842831 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.842855 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.842869 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.842883 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.842892 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.844138 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.856217 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.856317 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.856739 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.857126 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.859210 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.859516 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.859546 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.860014 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.860599 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.860783 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.861496 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.861986 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.862271 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.862422 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.862522 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.862586 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.862728 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.862731 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.863022 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.863134 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.863481 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.863587 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.863740 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.864014 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.864164 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.864384 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.864530 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.864540 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.864798 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.865059 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.864865 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.865136 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.865165 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.865283 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.865396 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.865539 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.865578 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.865820 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.866077 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.866296 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.866422 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.866412 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ncxc4"] Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.866657 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.866955 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.867199 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.867443 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.867977 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.868288 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.868593 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.868863 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.869001 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.869287 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.869731 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.870525 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.871079 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.872761 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.872813 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.873003 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.873445 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.873572 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.873629 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.874004 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.874537 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.874707 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.875008 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.875029 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.875100 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.875247 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.875372 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.875453 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.875811 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.876133 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.876161 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.876466 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.876734 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.876987 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.877218 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.877440 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.877443 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.877511 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.877572 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.877768 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.877740 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.878123 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.878384 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.878433 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.878650 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.878770 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.879141 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.879205 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:49.379173885 +0000 UTC m=+21.360497789 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.878472 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.878650 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.878816 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.879674 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.880094 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.879998 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.880431 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.881021 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.881880 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.882566 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.883081 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.883079 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.883482 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.883725 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.883917 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.885424 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.887033 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.887395 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.888836 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.889962 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.889988 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.890002 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:48 crc kubenswrapper[4779]: E0929 09:29:48.890052 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:49.390038132 +0000 UTC m=+21.371362036 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.890438 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.891788 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.898729 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\"
,\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.899681 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.910151 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.919830 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.920023 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.921934 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff0
2d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927643 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-hostroot\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927678 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-multus-certs\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927706 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: 
\"kubernetes.io/host-path/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-rootfs\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927727 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-cni-multus\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927747 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-cni-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927766 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-os-release\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927787 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927808 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvwtv\" (UniqueName: \"kubernetes.io/projected/564bff56-93cb-42ac-bd34-bbe97f99f411-kube-api-access-nvwtv\") pod \"node-resolver-r5584\" (UID: \"564bff56-93cb-42ac-bd34-bbe97f99f411\") " pod="openshift-dns/node-resolver-r5584" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927848 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-cnibin\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927872 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr62p\" (UniqueName: \"kubernetes.io/projected/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-kube-api-access-zr62p\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927947 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927947 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: 
\"kubernetes.io/host-path/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-rootfs\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927985 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ndnj\" (UniqueName: \"kubernetes.io/projected/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-kube-api-access-7ndnj\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.927985 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928034 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6b0e23f7-a478-48e2-a745-193a90e87553-multus-daemon-config\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928058 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-system-cni-dir\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928076 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cnibin\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928102 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-mcd-auth-proxy-config\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928121 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/564bff56-93cb-42ac-bd34-bbe97f99f411-hosts-file\") pod \"node-resolver-r5584\" (UID: \"564bff56-93cb-42ac-bd34-bbe97f99f411\") " pod="openshift-dns/node-resolver-r5584" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-cni-bin\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928152 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/6b0e23f7-a478-48e2-a745-193a90e87553-cni-binary-copy\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928167 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-k8s-cni-cncf-io\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928181 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-conf-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928240 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cni-binary-copy\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928258 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-socket-dir-parent\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928274 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-kubelet\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928242 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928289 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-etc-kubernetes\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928307 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-system-cni-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928317 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-os-release\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928322 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5rx8\" (UniqueName: \"kubernetes.io/projected/6b0e23f7-a478-48e2-a745-193a90e87553-kube-api-access-x5rx8\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " 
pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928400 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-proxy-tls\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928415 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-cni-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928438 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-hostroot\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928440 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-os-release\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928492 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-conf-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928521 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928526 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-etc-kubernetes\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928558 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-netns\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928593 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/564bff56-93cb-42ac-bd34-bbe97f99f411-hosts-file\") pod \"node-resolver-r5584\" (UID: \"564bff56-93cb-42ac-bd34-bbe97f99f411\") " pod="openshift-dns/node-resolver-r5584" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928387 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-cnibin\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.928769 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-os-release\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929063 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929092 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-system-cni-dir\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929119 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cnibin\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929191 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-mcd-auth-proxy-config\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929270 4779 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929270 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cni-binary-copy\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929292 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-cni-bin\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929320 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-system-cni-dir\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 
09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929328 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-multus-socket-dir-parent\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929741 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-kubelet\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929767 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-netns\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929788 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6b0e23f7-a478-48e2-a745-193a90e87553-cni-binary-copy\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929803 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929823 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929806 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-k8s-cni-cncf-io\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929826 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-run-multus-certs\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.929893 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6b0e23f7-a478-48e2-a745-193a90e87553-multus-daemon-config\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930090 4779 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930112 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" 
(UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930124 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930135 4779 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930145 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930172 4779 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930185 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930195 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930204 4779 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930213 4779 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930222 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.930998 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931021 4779 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931057 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931067 4779 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931076 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931085 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931095 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931132 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931143 4779 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931152 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931160 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931168 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931177 4779 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931186 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931780 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931802 4779 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931814 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" 
(UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931824 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931834 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931843 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931853 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931675 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.931745 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6b0e23f7-a478-48e2-a745-193a90e87553-host-var-lib-cni-multus\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933300 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-proxy-tls\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933348 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933364 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933376 4779 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933385 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc 
kubenswrapper[4779]: I0929 09:29:48.933394 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933404 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933414 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933423 4779 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933433 4779 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933442 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933451 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933459 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933467 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933550 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933560 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933575 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933584 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933593 4779 reconciler_common.go:293] "Volume detached 
for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933602 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933611 4779 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933620 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933629 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933638 4779 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933646 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933695 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933706 4779 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933716 4779 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933725 4779 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933735 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933749 4779 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933761 4779 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933770 4779 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933779 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933789 4779 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933805 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933817 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933826 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933834 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933843 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933853 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933862 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933871 4779 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933881 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933896 4779 reconciler_common.go:293] "Volume detached for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933929 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933942 4779 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933953 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933964 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933977 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933986 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.933996 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934005 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934014 4779 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934024 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934033 4779 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934041 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934050 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934059 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934074 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934083 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934093 4779 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934102 4779 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934111 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934120 4779 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934130 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934142 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934152 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934161 4779 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934172 4779 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934181 4779 
reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934189 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934199 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934207 4779 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934216 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934224 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934232 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934241 4779 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934249 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934257 4779 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934266 4779 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934275 4779 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934283 4779 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934291 4779 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934300 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934308 4779 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934316 4779 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934323 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934332 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934340 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934349 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934357 4779 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934367 4779 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934375 4779 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934383 4779 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934391 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934400 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934408 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934417 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934424 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934433 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934453 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934471 4779 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934483 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934494 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934504 4779 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934514 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934526 4779 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934537 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934547 4779 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934557 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934566 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934787 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934800 4779 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934809 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934817 4779 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934866 4779 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934876 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934884 4779 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934891 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.934920 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.938478 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.947385 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvwtv\" (UniqueName: \"kubernetes.io/projected/564bff56-93cb-42ac-bd34-bbe97f99f411-kube-api-access-nvwtv\") pod \"node-resolver-r5584\" (UID: \"564bff56-93cb-42ac-bd34-bbe97f99f411\") " pod="openshift-dns/node-resolver-r5584" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.947707 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.949920 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr62p\" (UniqueName: \"kubernetes.io/projected/95f4faf4-0a02-4440-ad6d-2ab0fae56bb6-kube-api-access-zr62p\") pod \"multus-additional-cni-plugins-twvvx\" (UID: \"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\") " pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.949948 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5rx8\" (UniqueName: \"kubernetes.io/projected/6b0e23f7-a478-48e2-a745-193a90e87553-kube-api-access-x5rx8\") pod \"multus-f2tkr\" (UID: \"6b0e23f7-a478-48e2-a745-193a90e87553\") " pod="openshift-multus/multus-f2tkr" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.950086 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ndnj\" (UniqueName: \"kubernetes.io/projected/f1a5d3a7-37d9-4a87-864c-e4af7f504a19-kube-api-access-7ndnj\") pod \"machine-config-daemon-5lnlv\" (UID: \"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\") " pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.950595 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.950623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.950631 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.950644 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.950653 4779 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:48Z","lastTransitionTime":"2025-09-29T09:29:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.954858 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.962750 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.965130 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.968666 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.972164 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 09:29:48 crc kubenswrapper[4779]: W0929 09:29:48.978205 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-0d91500a12229766c6926217c82468708715b91c63ab721b20a9e682d0e35dd1 WatchSource:0}: Error finding container 0d91500a12229766c6926217c82468708715b91c63ab721b20a9e682d0e35dd1: Status 404 returned error can't find the container with id 0d91500a12229766c6926217c82468708715b91c63ab721b20a9e682d0e35dd1 Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.978639 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.980608 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 09:29:48 crc kubenswrapper[4779]: W0929 09:29:48.986119 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-08156d1fc14d8fcd03c5380ff9d5afbde03daae4a607c0e898ce25b34628437e WatchSource:0}: Error finding container 08156d1fc14d8fcd03c5380ff9d5afbde03daae4a607c0e898ce25b34628437e: Status 404 returned error can't find the container with id 08156d1fc14d8fcd03c5380ff9d5afbde03daae4a607c0e898ce25b34628437e Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.986456 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.990867 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8cc
f4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:48 crc kubenswrapper[4779]: I0929 09:29:48.993477 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-f2tkr" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.000192 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-r5584" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.000409 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.005723 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-twvvx" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.009873 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.018494 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:49 crc kubenswrapper[4779]: W0929 09:29:49.019408 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1a5d3a7_37d9_4a87_864c_e4af7f504a19.slice/crio-b9a61645ff8b8fe5f011b16479686b0449739c6ed8ac88b3f7f8a8b0a33ede1e WatchSource:0}: Error finding container b9a61645ff8b8fe5f011b16479686b0449739c6ed8ac88b3f7f8a8b0a33ede1e: Status 404 returned error can't find the container with id b9a61645ff8b8fe5f011b16479686b0449739c6ed8ac88b3f7f8a8b0a33ede1e Sep 29 09:29:49 crc kubenswrapper[4779]: W0929 09:29:49.024766 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b0e23f7_a478_48e2_a745_193a90e87553.slice/crio-127078b8ebaaa0bc23f67bd0ffb9a58087f5c5f032dcbb8470cf160eb469cabd WatchSource:0}: Error finding container 127078b8ebaaa0bc23f67bd0ffb9a58087f5c5f032dcbb8470cf160eb469cabd: Status 404 returned error can't find the container with id 127078b8ebaaa0bc23f67bd0ffb9a58087f5c5f032dcbb8470cf160eb469cabd Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.029406 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
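
Note: every "Failed to update status for pod" entry above fails for the same reason: the kubelet's status patches are intercepted by the pod.network-node-identity.openshift.io admission webhook, which is routed to 127.0.0.1:9743 before its server is listening, so every POST ends in "connection refused". A minimal standalone probe (hypothetical diagnostic, not kubelet code) that reproduces the same dial error:

// webhookprobe.go - hypothetical check of the webhook endpoint named in the log.
package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	// Same host:port the failing webhook POSTs target.
	conn, err := net.DialTimeout("tcp", "127.0.0.1:9743", 2*time.Second)
	if err != nil {
		// Prints "dial tcp 127.0.0.1:9743: connect: connection refused"
		// for as long as the webhook server is not yet up.
		fmt.Println(err)
		return
	}
	conn.Close()
	fmt.Println("webhook endpoint is accepting connections")
}
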
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.029406 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035692 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-etc-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035726 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-ovn\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035776 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-config\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035807 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035828 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-slash\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035847 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-script-lib\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035873 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-var-lib-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035893 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-log-socket\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035928 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/60d71749-dfb5-4095-b11b-b70f1a549b88-ovn-node-metrics-cert\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035963 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-env-overrides\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.035986 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-kubelet\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036008 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036048 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-ovn-kubernetes\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036124 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-netd\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036207 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-node-log\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036241 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m99nd\" (UniqueName: \"kubernetes.io/projected/60d71749-dfb5-4095-b11b-b70f1a549b88-kube-api-access-m99nd\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036274 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-systemd-units\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036299 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-netns\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036330 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-bin\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.036356 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-systemd\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.039404 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.048443 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.055138 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.055182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.055195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.055439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.056248 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:49 crc kubenswrapper[4779]: W0929 09:29:49.067098 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95f4faf4_0a02_4440_ad6d_2ab0fae56bb6.slice/crio-1d45a121c833626ad0159286c396a661555726a707fff1731238515293644c89 WatchSource:0}: Error finding container 1d45a121c833626ad0159286c396a661555726a707fff1731238515293644c89: Status 404 returned error can't find the container with id 1d45a121c833626ad0159286c396a661555726a707fff1731238515293644c89
Sep 29 09:29:49 crc kubenswrapper[4779]: W0929 09:29:49.067465 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod564bff56_93cb_42ac_bd34_bbe97f99f411.slice/crio-aeaa53d04d55c6d4f7e2ff79a2a86796c7d4486561e23caa5c202090859aa951 WatchSource:0}: Error finding container aeaa53d04d55c6d4f7e2ff79a2a86796c7d4486561e23caa5c202090859aa951: Status 404 returned error can't find the container with id aeaa53d04d55c6d4f7e2ff79a2a86796c7d4486561e23caa5c202090859aa951
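
Note: the "Node became not ready" condition above keeps re-reporting "no CNI configuration file in /etc/kubernetes/cni/net.d/" because the container runtime's network plugin looks for a CNI config in that directory and ovnkube-node has not written one yet. A hypothetical standalone check (not kubelet code) for the same condition:

// cnicheck.go - hypothetical diagnostic mirroring the NotReady message.
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory named in the NotReady message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	for _, e := range entries {
		// CNI config files conventionally end in .conf, .conflist, or .json.
		name := e.Name()
		if strings.HasSuffix(name, ".conf") || strings.HasSuffix(name, ".conflist") || strings.HasSuffix(name, ".json") {
			fmt.Println("found CNI config:", name)
			return
		}
	}
	fmt.Println("no CNI configuration file in", dir, "- node will stay NotReady")
}
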
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.079975 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137499 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137543 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-slash\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137597 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-script-lib\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137611 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: 
\"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137624 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-var-lib-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137671 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-slash\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137674 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-log-socket\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137697 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/60d71749-dfb5-4095-b11b-b70f1a549b88-ovn-node-metrics-cert\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137721 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-env-overrides\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137747 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137770 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-kubelet\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137802 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-ovn-kubernetes\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137822 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-netd\") pod \"ovnkube-node-ncxc4\" (UID: 
\"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137844 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-node-log\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137867 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m99nd\" (UniqueName: \"kubernetes.io/projected/60d71749-dfb5-4095-b11b-b70f1a549b88-kube-api-access-m99nd\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137886 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-bin\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137921 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-systemd-units\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137940 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-netns\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137960 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-systemd\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.137981 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-etc-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138004 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-ovn\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138042 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-config\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc 
kubenswrapper[4779]: I0929 09:29:49.137650 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-var-lib-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138030 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-kubelet\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138118 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-log-socket\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138126 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-ovn-kubernetes\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138162 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-netd\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138193 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-node-log\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138327 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-systemd\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138385 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-bin\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138423 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-systemd-units\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138456 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-netns\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-etc-openvswitch\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138527 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-ovn\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138600 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-script-lib\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138651 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.138935 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-env-overrides\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.139159 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-config\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.142431 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/60d71749-dfb5-4095-b11b-b70f1a549b88-ovn-node-metrics-cert\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.170603 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m99nd\" (UniqueName: \"kubernetes.io/projected/60d71749-dfb5-4095-b11b-b70f1a549b88-kube-api-access-m99nd\") pod \"ovnkube-node-ncxc4\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.179444 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.179472 4779 
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.179472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.179483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.179498 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.179507 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.200111 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4"
Sep 29 09:29:49 crc kubenswrapper[4779]: W0929 09:29:49.240796 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60d71749_dfb5_4095_b11b_b70f1a549b88.slice/crio-cd89eb93623faf9e6fe6ae575af0eabde3aaed1a89def7af1a48eb45c9854932 WatchSource:0}: Error finding container cd89eb93623faf9e6fe6ae575af0eabde3aaed1a89def7af1a48eb45c9854932: Status 404 returned error can't find the container with id cd89eb93623faf9e6fe6ae575af0eabde3aaed1a89def7af1a48eb45c9854932
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.282581 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.282612 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.282623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.282640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.282650 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.339809 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.340009 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:29:50.339980355 +0000 UTC m=+22.321304269 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.340104 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.340153 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.340249 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.340262 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.340328 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:50.340306765 +0000 UTC m=+22.321630719 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
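
Note: the "No retries permitted until ... (durationBeforeRetry 1s)" lines show the kubelet's per-operation retry gating: a failed mount or unmount records an earliest-retry timestamp, the reconciler skips the operation until that deadline passes, and the wait grows on repeated failures. A minimal sketch of such a gate (illustrative only, not the kubelet's nestedpendingoperations implementation):

// backoff.go - illustrative retry gate with a growing delay.
package main

import (
	"fmt"
	"time"
)

type retryGate struct {
	notBefore time.Time
	delay     time.Duration
}

// fail records a failure and pushes the next attempt out, doubling the
// delay up to a cap (the log's first deadline is durationBeforeRetry 1s).
func (g *retryGate) fail(now time.Time) {
	if g.delay == 0 {
		g.delay = time.Second
	} else if g.delay < 2*time.Minute {
		g.delay *= 2
	}
	g.notBefore = now.Add(g.delay)
}

// allowed reports whether a retry may run yet.
func (g *retryGate) allowed(now time.Time) bool { return !now.Before(g.notBefore) }

func main() {
	g := &retryGate{}
	now := time.Now()
	g.fail(now)
	fmt.Println("retry allowed immediately?", g.allowed(now))               // false
	fmt.Println("retry allowed after 1s?", g.allowed(now.Add(time.Second))) // true
}
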
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.384424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.384719 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.384730 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.384749 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.384760 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.441048 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.441101 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.441247 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.441264 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.441258 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.441301 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.441314 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.441366 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:50.441351986 +0000 UTC m=+22.422675890 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.441275 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.441454 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:50.441440439 +0000 UTC m=+22.422764343 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.487288 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.487338 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.487348 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.487364 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.487374 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.589708 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.589748 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.589759 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.589775 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.589784 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.692742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.692779 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.692790 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.692803 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.692812 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.713341 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:49 crc kubenswrapper[4779]: E0929 09:29:49.713458 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.795980 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.796043 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.796055 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.796079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.796092 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.827386 4779 generic.go:334] "Generic (PLEG): container finished" podID="95f4faf4-0a02-4440-ad6d-2ab0fae56bb6" containerID="4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f" exitCode=0 Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.827452 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" event={"ID":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6","Type":"ContainerDied","Data":"4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.827476 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" event={"ID":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6","Type":"ContainerStarted","Data":"1d45a121c833626ad0159286c396a661555726a707fff1731238515293644c89"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.831799 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2tkr" event={"ID":"6b0e23f7-a478-48e2-a745-193a90e87553","Type":"ContainerStarted","Data":"944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.831829 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2tkr" event={"ID":"6b0e23f7-a478-48e2-a745-193a90e87553","Type":"ContainerStarted","Data":"127078b8ebaaa0bc23f67bd0ffb9a58087f5c5f032dcbb8470cf160eb469cabd"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.838428 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4b7c1ae84b06c717af1d6426429e3fb8a246ec106eceda451d7e508e3f10892a"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.839974 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.840023 
4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.840039 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"b9a61645ff8b8fe5f011b16479686b0449739c6ed8ac88b3f7f8a8b0a33ede1e"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.840645 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.842011 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.842044 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.842056 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"08156d1fc14d8fcd03c5380ff9d5afbde03daae4a607c0e898ce25b34628437e"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.843440 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.843477 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0d91500a12229766c6926217c82468708715b91c63ab721b20a9e682d0e35dd1"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.845539 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2" exitCode=0 Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.845592 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.845610 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"cd89eb93623faf9e6fe6ae575af0eabde3aaed1a89def7af1a48eb45c9854932"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.848186 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-r5584" event={"ID":"564bff56-93cb-42ac-bd34-bbe97f99f411","Type":"ContainerStarted","Data":"eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.848250 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-r5584" event={"ID":"564bff56-93cb-42ac-bd34-bbe97f99f411","Type":"ContainerStarted","Data":"aeaa53d04d55c6d4f7e2ff79a2a86796c7d4486561e23caa5c202090859aa951"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.869521 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.887954 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.901291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.901340 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.901349 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.901365 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.901374 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:49Z","lastTransitionTime":"2025-09-29T09:29:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.906480 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.924077 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.941058 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.964214 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.976660 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:49 crc kubenswrapper[4779]: I0929 09:29:49.988611 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.001474 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.004604 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.004649 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.004662 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.004680 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.004694 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
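The NodeNotReady condition recorded here is a separate symptom: no CNI configuration exists yet in /etc/kubernetes/cni/net.d/, the directory this kubelet is configured to read (the config is normally written by the network plugin, multus on this cluster, once its pods come up). A sketch that lists whatever CNI configs are present, assuming the directory named in the kubelet message; plain CNI installs typically use /etc/cni/net.d instead:

    package main

    import (
        "fmt"
        "log"
        "os"
        "path/filepath"
    )

    func main() {
        // Directory taken from the kubelet message above.
        dir := "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(dir)
        if err != nil {
            log.Fatalf("cannot read %s: %v", dir, err)
        }
        for _, e := range entries {
            // CNI loads *.conf and *.conflist files from this directory.
            if ext := filepath.Ext(e.Name()); ext == ".conf" || ext == ".conflist" {
                fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
            }
        }
    }

An empty listing while the multus pods above are still in ContainerCreating/PodInitializing is consistent with the "Has your network provider started?" message that follows.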
Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.014564 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.030611 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc 
kubenswrapper[4779]: I0929 09:29:50.044750 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.056068 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.066746 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.084443 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.106541 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.106689 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.106708 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.106724 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.106734 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
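Each "Failed to update status for pod" record embeds the rejected patch as a Go-quoted string inside the err field, which is why every quote of the JSON appears as \\\" in this file. Once the outer err="..." quoting is stripped, the patch is itself one quoted string, and strconv.Unquote recovers the plain JSON. A sketch over a hand-shortened stand-in (the uid is from the multus-additional-cni-plugins-twvvx record above; the real payload is far longer):

    package main

    import (
        "fmt"
        "log"
        "strconv"
    )

    func main() {
        // Hand-shortened stand-in for one quoted patch; the real string
        // carries the full status object seen in the log.
        quoted := `"{\"metadata\":{\"uid\":\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\"}}"`
        patch, err := strconv.Unquote(quoted)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(patch) // {"metadata":{"uid":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6"}}
    }
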
Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.107556 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.130481 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.143654 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.154576 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.165193 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.176292 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.195924 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.207474 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.208846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.208876 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.208886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.208920 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.208931 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.311103 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.311146 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.311157 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.311177 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.311189 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.349582 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.349729 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:29:52.349707197 +0000 UTC m=+24.331031101 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.350033 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.350073 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.350182 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.350207 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.350230 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:52.350220452 +0000 UTC m=+24.331544356 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.350309 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:52.350291434 +0000 UTC m=+24.331615338 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.414138 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.414165 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.414175 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.414188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.414197 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.450926 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.450974 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.451127 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.451146 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.451159 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.451208 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:52.451192862 +0000 UTC m=+24.432516766 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.451560 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.451576 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.451587 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.451616 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:52.451605864 +0000 UTC m=+24.432929778 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.516476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.516506 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.516515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.516530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.516538 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.618846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.618890 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.618923 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.618941 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.618954 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.686247 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-gx6f2"] Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.686793 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.690095 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.690150 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.690525 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.690571 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.713970 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.713963 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.713981 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.714136 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:29:50 crc kubenswrapper[4779]: E0929 09:29:50.714192 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.719624 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.720507 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.721514 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.722314 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.722885 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.723008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.723111 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.723233 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.723310 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.723081 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.724661 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.725630 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.727326 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.728431 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.730014 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.731113 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.738583 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.739761 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.745087 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.746032 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.747945 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.749112 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.755674 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.757021 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.758354 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.759361 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.764386 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.765189 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.772079 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.772873 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.773444 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.774538 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.775585 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.776209 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.777239 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.778116 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 29 09:29:50 crc 
kubenswrapper[4779]: I0929 09:29:50.783431 4779 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.783537 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.785180 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.785774 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc 
kubenswrapper[4779]: I0929 09:29:50.786553 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.787036 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.790257 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.791403 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.791973 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.793600 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.794357 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.794826 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.795761 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.797139 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.797792 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.799341 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.799866 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.800756 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.801479 4779 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.802324 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.802839 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.803593 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.804852 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.805647 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.806708 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.808888 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.822237 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.825315 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.825340 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.825349 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.825363 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.825372 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.835492 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.848184 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.852094 4779 generic.go:334] "Generic (PLEG): container finished" podID="95f4faf4-0a02-4440-ad6d-2ab0fae56bb6" containerID="693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45" exitCode=0 Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.852140 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" event={"ID":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6","Type":"ContainerDied","Data":"693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.855788 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/25e887ba-720d-4f7b-9763-5703781fd8bf-host\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.855836 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9lw7\" (UniqueName: 
\"kubernetes.io/projected/25e887ba-720d-4f7b-9763-5703781fd8bf-kube-api-access-w9lw7\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.855861 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/25e887ba-720d-4f7b-9763-5703781fd8bf-serviceca\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.856125 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.856165 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.856178 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.860324 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kub
ernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.894616 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z 
is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.917357 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.928943 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.928977 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.928988 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.929005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.929015 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:50Z","lastTransitionTime":"2025-09-29T09:29:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.936403 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.950793 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.959286 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/25e887ba-720d-4f7b-9763-5703781fd8bf-host\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.959342 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9lw7\" (UniqueName: \"kubernetes.io/projected/25e887ba-720d-4f7b-9763-5703781fd8bf-kube-api-access-w9lw7\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.959368 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/25e887ba-720d-4f7b-9763-5703781fd8bf-serviceca\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " 
pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.960308 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/25e887ba-720d-4f7b-9763-5703781fd8bf-serviceca\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.960436 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/25e887ba-720d-4f7b-9763-5703781fd8bf-host\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:50 crc kubenswrapper[4779]: I0929 09:29:50.963567 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:50.989838 4779 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"nam
e\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:50Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:50.992988 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9lw7\" (UniqueName: \"kubernetes.io/projected/25e887ba-720d-4f7b-9763-5703781fd8bf-kube-api-access-w9lw7\") pod \"node-ca-gx6f2\" (UID: \"25e887ba-720d-4f7b-9763-5703781fd8bf\") " pod="openshift-image-registry/node-ca-gx6f2" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.015819 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.033370 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.034828 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.034851 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.034859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.034872 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.034881 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.049450 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.060118 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.079809 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.098486 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.111215 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.127933 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.137482 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.137511 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.137518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.137533 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.137543 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.139940 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.162923 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.178434 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.191623 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.207950 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-gx6f2"
Sep 29 09:29:51 crc kubenswrapper[4779]: W0929 09:29:51.223916 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25e887ba_720d_4f7b_9763_5703781fd8bf.slice/crio-fddfb8899670a5a2dd04b548cdbdebab350c966e9ee07084ea86e4c9f1537e71 WatchSource:0}: Error finding container fddfb8899670a5a2dd04b548cdbdebab350c966e9ee07084ea86e4c9f1537e71: Status 404 returned error can't find the container with id fddfb8899670a5a2dd04b548cdbdebab350c966e9ee07084ea86e4c9f1537e71
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.240317 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.240357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.240368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.240384 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.240401 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.343607 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.343645 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.343657 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.343675 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.343690 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.445933 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.445971 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.445983 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.446001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.446012 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.549626 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.549663 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.549672 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.549687 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.549697 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.651860 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.651950 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.651974 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.652001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.652023 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.714200 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:29:51 crc kubenswrapper[4779]: E0929 09:29:51.714429 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.754424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.754471 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.754486 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.754505 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.754522 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.857128 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.857171 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.857185 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.857204 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.857218 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.860239 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.863726 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.863763 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.863780 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.866772 4779 generic.go:334] "Generic (PLEG): container finished" podID="95f4faf4-0a02-4440-ad6d-2ab0fae56bb6" containerID="dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412" exitCode=0
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.866837 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" event={"ID":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6","Type":"ContainerDied","Data":"dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.868388 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-gx6f2" event={"ID":"25e887ba-720d-4f7b-9763-5703781fd8bf","Type":"ContainerStarted","Data":"2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.868413 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-gx6f2" event={"ID":"25e887ba-720d-4f7b-9763-5703781fd8bf","Type":"ContainerStarted","Data":"fddfb8899670a5a2dd04b548cdbdebab350c966e9ee07084ea86e4c9f1537e71"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.878998 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.891921 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.907017 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.921669 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.936183 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.951703 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.959564 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.959605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.959618 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.959638 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.959652 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:51Z","lastTransitionTime":"2025-09-29T09:29:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.965680 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.977664 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.989131 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:51 crc kubenswrapper[4779]: I0929 09:29:51.998846 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:51Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.013406 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.033678 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.046391 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.047554 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.060454 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.061311 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.062780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.062822 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.062834 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.062852 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.062863 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.063508 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.081487 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.094282 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.107480 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.122506 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.133015 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.147065 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.162126 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.165764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.165817 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.165830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.165850 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.165865 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.178968 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.195540 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.207459 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.229056 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z 
is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.239965 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.253716 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.267364 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.268829 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.268880 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.268895 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.268938 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.268955 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.278973 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.287803 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.299888 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.338761 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.370918 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.371055 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.371087 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.371098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.371104 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:29:56.37107967 +0000 UTC m=+28.352403574 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.371117 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.371128 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.371058 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.371149 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.371189 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:56.371172413 +0000 UTC m=+28.352496387 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.371249 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.371344 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.371387 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-09-29 09:29:56.37137993 +0000 UTC m=+28.352703834 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.383836 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.422767 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.460261 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.471843 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.471888 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.472083 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.472114 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.472128 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.472163 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.472209 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.472236 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.472191 4779 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:56.472173323 +0000 UTC m=+28.453497227 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.472388 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 09:29:56.472339348 +0000 UTC m=+28.453663282 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.473500 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.473547 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.473561 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.473583 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.473598 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.499299 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.539887 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.576937 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.576989 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.577001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.577026 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.577042 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.585697 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ff
cf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.625885 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\
"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d59
61a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.660508 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.680826 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.680939 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.680965 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.680999 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.681022 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.714255 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.714309 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.714451 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:29:52 crc kubenswrapper[4779]: E0929 09:29:52.714551 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.784046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.784086 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.784097 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.784119 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.784146 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.876511 4779 generic.go:334] "Generic (PLEG): container finished" podID="95f4faf4-0a02-4440-ad6d-2ab0fae56bb6" containerID="acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce" exitCode=0 Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.876571 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" event={"ID":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6","Type":"ContainerDied","Data":"acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.886056 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.886120 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.886146 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.886177 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.886201 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.894597 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.916570 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.932138 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.946871 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.972887 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.989519 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.989579 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.989594 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.989619 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.989634 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:52Z","lastTransitionTime":"2025-09-29T09:29:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:52 crc kubenswrapper[4779]: I0929 09:29:52.991177 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:52Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.005736 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.018283 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.030247 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.073944 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.097363 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.097400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.097414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.097431 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.097442 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.104868 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.145121 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269
019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.176268 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.200140 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.200183 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.200195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.200212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.200226 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.221845 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z 
is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.302448 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.302495 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.302505 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.302518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.302527 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.405082 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.405121 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.405133 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.405149 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.405161 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.506953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.506992 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.507002 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.507015 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.507024 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.609713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.609750 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.609759 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.609775 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.609785 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.712142 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.712197 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.712212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.712234 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.712252 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.713560 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:53 crc kubenswrapper[4779]: E0929 09:29:53.713687 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.814743 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.814804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.814827 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.814853 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.814870 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.886274 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.905626 4779 generic.go:334] "Generic (PLEG): container finished" podID="95f4faf4-0a02-4440-ad6d-2ab0fae56bb6" containerID="b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae" exitCode=0 Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.905667 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" event={"ID":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6","Type":"ContainerDied","Data":"b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.917647 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.917696 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.917710 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.917728 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.917739 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:53Z","lastTransitionTime":"2025-09-29T09:29:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.934179 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr6
2p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.950396 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.968578 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.980817 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:53 crc kubenswrapper[4779]: I0929 09:29:53.993699 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:53Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.006041 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.016111 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.021214 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.021247 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.021264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.021284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.021298 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.036548 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.049694 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.073458 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.085045 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.105157 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.122088 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.124210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.124261 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.124284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.124316 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.124340 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.139856 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.226676 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.226705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.226716 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.226731 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 
09:29:54.226743 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.330405 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.330449 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.330462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.330481 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.330493 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.432573 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.432630 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.432648 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.432672 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.432700 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.498737 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.503547 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.509213 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.512437 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.527063 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.536086 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.536153 4779 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.536177 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.536206 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.536227 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.539770 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 
2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.552432 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.569221 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.591566 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.611632 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.630397 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.638810 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.638836 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.638845 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.638859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.638867 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.645387 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.660421 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.682210 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.710287 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.713443 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.713579 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:54 crc kubenswrapper[4779]: E0929 09:29:54.713632 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:29:54 crc kubenswrapper[4779]: E0929 09:29:54.713837 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.724164 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.741728 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.741782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.741800 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.741826 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.741845 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.743866 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ff
cf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.760789 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z 
is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.779596 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.794528 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.811621 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.827759 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.842763 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.845519 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.845553 4779 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.845564 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.845582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.845595 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.854135 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 
2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.865423 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.877663 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.889522 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.904263 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.914732 4779 generic.go:334] "Generic (PLEG): container finished" podID="95f4faf4-0a02-4440-ad6d-2ab0fae56bb6" containerID="93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a" exitCode=0 Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.914961 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" event={"ID":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6","Type":"ContainerDied","Data":"93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.916703 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.932419 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.947498 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.954978 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.955016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.955033 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.955051 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.955065 4779 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:54Z","lastTransitionTime":"2025-09-29T09:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:54 crc kubenswrapper[4779]: I0929 09:29:54.961414 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.001555 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:54Z 
is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.045102 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.057201 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.057234 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.057244 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.057258 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.057267 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.078829 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.115712 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.157805 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.160016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.160046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.160056 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.160074 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.160085 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.198622 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.236096 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.262327 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.262354 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.262361 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.262376 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.262385 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.277396 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.320809 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.358008 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.364469 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.364494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.364502 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.364515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.364525 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.399548 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.444588 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.467231 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.467267 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.467276 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.467290 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.467300 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.486104 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.522599 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.558476 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.569134 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.569156 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.569167 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.569182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.569193 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.671508 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.671538 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.671549 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.671566 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.671577 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.713915 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:55 crc kubenswrapper[4779]: E0929 09:29:55.714023 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.774275 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.774306 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.774317 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.774334 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.774345 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.877281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.877318 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.877328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.877342 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.877352 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.923556 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" event={"ID":"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6","Type":"ContainerStarted","Data":"d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.930163 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988"} Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.930522 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.930574 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.937011 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.952361 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.964538 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.968968 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.969026 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.980464 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:55Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.986009 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.986076 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.986113 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.986146 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:55 crc kubenswrapper[4779]: I0929 09:29:55.986171 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:55Z","lastTransitionTime":"2025-09-29T09:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.003783 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 
2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.019886 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.033213 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.045953 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.056640 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.067136 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.077219 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.089260 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.089299 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.089309 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.089323 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.089333 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.095116 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.108164 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.118611 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.163068 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z 
is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.191764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.191820 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.191832 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.191852 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.191864 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.198344 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
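[Editor's note] Interleaved with the webhook failures, the kubelet keeps flipping the node to NotReady because the runtime finds no CNI config. A rough Go sketch of that readiness test, assuming only the conf-dir path named in the message; the real kubelet/CRI-O network-ready check differs in detail:

```go
// Rough sketch (not the actual kubelet/CRI-O implementation): the runtime
// scans the CNI conf directory for a usable network config; finding none
// yields the NetworkPluginNotReady condition logged above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
	var found []string
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
		if err == nil {
			found = append(found, matches...)
		}
	}
	if len(found) == 0 {
		fmt.Fprintf(os.Stderr, "no CNI configuration file in %s. Has your network provider started?\n", confDir)
		os.Exit(1)
	}
	fmt.Println("CNI config present:", found)
}
```

In this log the condition is self-referential: the network plugin (ovnkube-node) cannot come up while its own status updates are blocked, so the CNI config never appears and the Ready condition stays False.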
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.198344 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.235958 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.278794 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.293623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.293657 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.293666 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.293680 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.293692 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.319710 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.357487 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.396890 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.396958 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.396968 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.396984 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.396994 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.399569 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.450710 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.450811 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.450868 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:30:04.4508477 +0000 UTC m=+36.432171614 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.450882 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.450946 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:04.450933733 +0000 UTC m=+36.432257637 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.451041 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.451156 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.451209 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:04.451198191 +0000 UTC m=+36.432522115 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.459892 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.478444 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.499389 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.499430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.499441 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.499458 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.499469 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.522450 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.551984 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.552035 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.552137 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.552157 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.552167 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.552210 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:04.552196771 +0000 UTC m=+36.533520675 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.552239 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.552305 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.552321 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.552374 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:04.552358926 +0000 UTC m=+36.533682840 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.566817 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.600918 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.601875 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.601938 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.601956 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.601975 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.601987 4779 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.640216 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.680593 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.704861 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.704918 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.704929 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.704945 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.704955 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.715069 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.715092 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.715180 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:29:56 crc kubenswrapper[4779]: E0929 09:29:56.715315 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.724967 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"}
,{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.771496 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:56Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.807255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.807283 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.807291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.807305 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.807322 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.910046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.910084 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.910093 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.910108 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.910117 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:56Z","lastTransitionTime":"2025-09-29T09:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:56 crc kubenswrapper[4779]: I0929 09:29:56.932435 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.012298 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.012364 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.012387 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.012413 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.012434 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.114724 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.115031 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.115042 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.115056 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.115065 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.223462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.223510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.223524 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.223542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.223555 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.326728 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.326775 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.326786 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.326804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.326818 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.429739 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.429816 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.429841 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.429869 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.429885 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.532666 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.532709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.532722 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.532782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.532797 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.634833 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.634859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.634867 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.634879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.634887 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.713310 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:57 crc kubenswrapper[4779]: E0929 09:29:57.713419 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.737373 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.737407 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.737415 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.737429 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.737438 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.839309 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.839347 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.839358 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.839376 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.839388 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.935506 4779 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.941468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.941497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.941506 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.941519 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:57 crc kubenswrapper[4779]: I0929 09:29:57.941528 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:57Z","lastTransitionTime":"2025-09-29T09:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.043740 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.043770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.043780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.043793 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.043802 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.052019 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.052096 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.052121 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.052156 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.052179 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: E0929 09:29:58.067135 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
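
[editor's note] The payload quoted in this "Error updating node status, will retry" entry is a strategic merge patch: the $setElementOrder/conditions directive pins the ordering of status.conditions while the conditions array carries only the fields being updated. A minimal, illustrative sketch of that payload shape (only the Ready condition shown; the real patch also carries allocatable, capacity, images, and nodeInfo):

// patchshape.go - a sketch of the strategic-merge-patch shape the kubelet
// sends when syncing node status (compare the escaped JSON in the error).
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	patch := map[string]any{
		"status": map[string]any{
			// Fixes the element order of the conditions list.
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			// Only the changed condition fields travel in the patch.
			"conditions": []map[string]string{{
				"type":   "Ready",
				"status": "False",
				"reason": "KubeletNotReady",
			}},
		},
	}
	b, _ := json.Marshal(patch)
	fmt.Println(string(b))
}

The kubelet retries this PATCH a handful of times per status sync before giving up, which is why the same multi-kilobyte payload and the same webhook error repeat back-to-back below (09:29:58.067, .083, .098, .127).
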
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.071268 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.071330 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.071348 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.071381 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.071401 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: E0929 09:29:58.083382 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.087455 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.087594 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.087701 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.087816 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.087956 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: E0929 09:29:58.098351 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.103021 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.103074 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.103092 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.103117 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.103134 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: E0929 09:29:58.127278 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.133008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.133086 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.133115 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.133149 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.133174 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: E0929 09:29:58.177208 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: E0929 09:29:58.177367 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.178723 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.178754 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.178764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.178781 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.178790 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.281462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.281496 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.281507 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.281522 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.281531 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.384205 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.384237 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.384245 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.384258 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.384273 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.486719 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.486781 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.486800 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.486824 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.486841 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.589556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.589582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.589590 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.589604 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.589612 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.692135 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.692178 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.692193 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.692210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.692223 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.713726 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.713811 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:29:58 crc kubenswrapper[4779]: E0929 09:29:58.713861 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:29:58 crc kubenswrapper[4779]: E0929 09:29:58.713946 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.732491 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.746546 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.766056 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.778048 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.794598 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.794665 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.794687 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.794716 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.794738 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.796212 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.813381 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.826200 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.838948 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.862721 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.875974 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.892957 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.897450 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.897488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.897498 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.897512 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.897522 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.903824 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secret
s/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.923935 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04
eb3948f00aceef15e4124988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.939898 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/0.log" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.942638 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988" exitCode=1 Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.942681 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988"} Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.943608 4779 scope.go:117] "RemoveContainer" containerID="577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.949105 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.968008 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.982214 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.992461 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.999020 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.999045 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.999055 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.999070 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:58 crc kubenswrapper[4779]: I0929 09:29:58.999083 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:58Z","lastTransitionTime":"2025-09-29T09:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.002406 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.015802 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.028467 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.041494 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.052183 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.064026 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.077810 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.094372 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.101541 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.101574 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.101614 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.101638 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.101651 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.116956 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.132436 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.143837 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.165704 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262686 6064 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262891 6064 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.263707 6064 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 09:29:58.263780 6064 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 09:29:58.263810 6064 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 09:29:58.263848 6064 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 09:29:58.263946 6064 factory.go:656] Stopping watch factory\\\\nI0929 09:29:58.263982 6064 ovnkube.go:599] Stopped ovnkube\\\\nI0929 09:29:58.263866 6064 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 09:29:58.263881 6064 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 09:29:58.264033 6064 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 
09:29:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.182302 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.203839 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.203874 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.203886 4779 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.203917 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.203929 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.306580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.306612 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.306622 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.306639 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.306650 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.408546 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.408603 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.408622 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.408645 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.408662 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.426742 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.443753 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.463163 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.478096 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.492287 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.513062 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.513661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.513694 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.513703 4779 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.513716 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.513727 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.532773 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.545238 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.557769 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.571319 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.586750 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.601877 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.616156 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.616200 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.616209 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.616222 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.616232 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.617789 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262686 6064 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262891 6064 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.263707 6064 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 09:29:58.263780 6064 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 09:29:58.263810 6064 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 09:29:58.263848 6064 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 09:29:58.263946 6064 factory.go:656] Stopping watch factory\\\\nI0929 09:29:58.263982 6064 ovnkube.go:599] Stopped ovnkube\\\\nI0929 09:29:58.263866 6064 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 09:29:58.263881 6064 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 09:29:58.264033 6064 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 
09:29:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.635885 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.646166 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.659488 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.713344 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:29:59 crc kubenswrapper[4779]: E0929 09:29:59.713489 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.718558 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.718595 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.718605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.718619 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.718627 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.821023 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.821057 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.821066 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.821079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.821088 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.923065 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.923112 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.923124 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.923143 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.923155 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:29:59Z","lastTransitionTime":"2025-09-29T09:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.947533 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/1.log" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.948143 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/0.log" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.951019 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741" exitCode=1 Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.951059 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741"} Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.951099 4779 scope.go:117] "RemoveContainer" containerID="577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.951734 4779 scope.go:117] "RemoveContainer" containerID="97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741" Sep 29 09:29:59 crc kubenswrapper[4779]: E0929 09:29:59.951947 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.978773 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:29:59 crc kubenswrapper[4779]: I0929 09:29:59.992377 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.001410 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.016619 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262686 6064 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262891 6064 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.263707 6064 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 09:29:58.263780 6064 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 09:29:58.263810 6064 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 09:29:58.263848 6064 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 09:29:58.263946 6064 factory.go:656] Stopping watch factory\\\\nI0929 09:29:58.263982 6064 ovnkube.go:599] Stopped ovnkube\\\\nI0929 09:29:58.263866 6064 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 09:29:58.263881 6064 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 09:29:58.264033 6064 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 09:29:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.025418 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.025461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.025472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.025488 4779 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.025499 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.030432 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.041376 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.055260 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.063749 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.080103 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.095762 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.115038 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.127757 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.128016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc 
kubenswrapper[4779]: I0929 09:30:00.128028 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.128043 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.128053 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.130825 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.149564 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.168130 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.181945 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:00Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.231580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.231638 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.231657 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.231731 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.231780 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.335375 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.335428 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.335447 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.335724 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.335752 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.438117 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.438158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.438170 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.438186 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.438198 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.540830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.540881 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.540897 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.540950 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.540967 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.643520 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.643560 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.643570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.643583 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.643592 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.713505 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.713584 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:00 crc kubenswrapper[4779]: E0929 09:30:00.713722 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:00 crc kubenswrapper[4779]: E0929 09:30:00.713851 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.747014 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.747059 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.747072 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.747092 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.747104 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.848859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.848891 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.848919 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.848932 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.848941 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.951292 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.951319 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.951328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.951342 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.951351 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:00Z","lastTransitionTime":"2025-09-29T09:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:00 crc kubenswrapper[4779]: I0929 09:30:00.955501 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/1.log" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.053958 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.054042 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.054053 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.054069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.054078 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.067492 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2"] Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.068109 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.069892 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.070175 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.086996 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.096185 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqdj2\" (UniqueName: \"kubernetes.io/projected/c469d18e-5b53-4a41-9305-87ae2f8db671-kube-api-access-zqdj2\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.096237 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c469d18e-5b53-4a41-9305-87ae2f8db671-env-overrides\") pod 
\"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.096291 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c469d18e-5b53-4a41-9305-87ae2f8db671-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.096332 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c469d18e-5b53-4a41-9305-87ae2f8db671-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.098976 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.111148 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.124619 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.138785 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.156103 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.157218 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.157241 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.157249 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.157261 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.157272 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.169946 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.182467 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.194680 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.197287 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c469d18e-5b53-4a41-9305-87ae2f8db671-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 
09:30:01.197340 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqdj2\" (UniqueName: \"kubernetes.io/projected/c469d18e-5b53-4a41-9305-87ae2f8db671-kube-api-access-zqdj2\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.197380 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c469d18e-5b53-4a41-9305-87ae2f8db671-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.197452 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c469d18e-5b53-4a41-9305-87ae2f8db671-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.197943 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c469d18e-5b53-4a41-9305-87ae2f8db671-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.198323 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c469d18e-5b53-4a41-9305-87ae2f8db671-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.203384 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c469d18e-5b53-4a41-9305-87ae2f8db671-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.207278 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.220436 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqdj2\" (UniqueName: \"kubernetes.io/projected/c469d18e-5b53-4a41-9305-87ae2f8db671-kube-api-access-zqdj2\") pod \"ovnkube-control-plane-749d76644c-fxjq2\" (UID: \"c469d18e-5b53-4a41-9305-87ae2f8db671\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.230029 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.241234 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.253783 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.259925 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.259954 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.259964 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.259978 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.259989 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.275044 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc
10ed1cadaa7f44f95cc17741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262686 6064 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262891 6064 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.263707 6064 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 09:29:58.263780 6064 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 09:29:58.263810 6064 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 09:29:58.263848 6064 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 09:29:58.263946 6064 factory.go:656] Stopping watch factory\\\\nI0929 09:29:58.263982 6064 ovnkube.go:599] Stopped ovnkube\\\\nI0929 09:29:58.263866 6064 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 09:29:58.263881 6064 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 09:29:58.264033 6064 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 09:29:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, 
Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.285403 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.297390 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.362646 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.362683 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.362699 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.362720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.362733 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.379930 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2"
Sep 29 09:30:01 crc kubenswrapper[4779]: W0929 09:30:01.390183 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc469d18e_5b53_4a41_9305_87ae2f8db671.slice/crio-2b5e63f9cf5cfb2eac0e9fbc514284317e24fdc797022c02048d3e2a33253da0 WatchSource:0}: Error finding container 2b5e63f9cf5cfb2eac0e9fbc514284317e24fdc797022c02048d3e2a33253da0: Status 404 returned error can't find the container with id 2b5e63f9cf5cfb2eac0e9fbc514284317e24fdc797022c02048d3e2a33253da0
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.465041 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.465078 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.465089 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.465107 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.465118 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.567154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.567186 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.567195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.567208 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.567219 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.670580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.670646 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.670672 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.670705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.670728 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.713506 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:01 crc kubenswrapper[4779]: E0929 09:30:01.713681 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.773295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.773346 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.773357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.773375 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.773386 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.876029 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.876069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.876079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.876098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.876109 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.962655 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" event={"ID":"c469d18e-5b53-4a41-9305-87ae2f8db671","Type":"ContainerStarted","Data":"01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.962701 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" event={"ID":"c469d18e-5b53-4a41-9305-87ae2f8db671","Type":"ContainerStarted","Data":"3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.962710 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" event={"ID":"c469d18e-5b53-4a41-9305-87ae2f8db671","Type":"ContainerStarted","Data":"2b5e63f9cf5cfb2eac0e9fbc514284317e24fdc797022c02048d3e2a33253da0"}
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.978328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.978359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.978367 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.978382 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.978392 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:01Z","lastTransitionTime":"2025-09-29T09:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:01 crc kubenswrapper[4779]: I0929 09:30:01.992403 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:01Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.004885 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.019449 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.038359 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc
10ed1cadaa7f44f95cc17741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262686 6064 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262891 6064 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.263707 6064 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 09:29:58.263780 6064 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 09:29:58.263810 6064 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 09:29:58.263848 6064 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 09:29:58.263946 6064 factory.go:656] Stopping watch factory\\\\nI0929 09:29:58.263982 6064 ovnkube.go:599] Stopped ovnkube\\\\nI0929 09:29:58.263866 6064 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 09:29:58.263881 6064 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 09:29:58.264033 6064 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 09:29:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, 
Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.051030 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 
09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.070094 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.081458 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.081515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.081531 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.081555 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.081572 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.088771 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.102673 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.116263 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.131523 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.147776 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.165395 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.180143 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.183018 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-qvlbd"] Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.183952 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:02 crc kubenswrapper[4779]: E0929 09:30:02.184045 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.188818 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.188875 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.188895 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.188941 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.188961 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.200746 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.209145 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.209174 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nsgx\" (UniqueName: \"kubernetes.io/projected/294a4484-da93-4c37-9ecf-18f68f4ad64d-kube-api-access-9nsgx\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.213668 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.226662 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.245786 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.260644 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.277481 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.291047 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.291582 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.291636 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.291653 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.291676 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.291693 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.309340 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.309937 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.309991 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nsgx\" (UniqueName: \"kubernetes.io/projected/294a4484-da93-4c37-9ecf-18f68f4ad64d-kube-api-access-9nsgx\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:02 crc kubenswrapper[4779]: E0929 09:30:02.310168 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:02 crc kubenswrapper[4779]: E0929 09:30:02.310265 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs podName:294a4484-da93-4c37-9ecf-18f68f4ad64d nodeName:}" failed. No retries permitted until 2025-09-29 09:30:02.810236875 +0000 UTC m=+34.791560819 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs") pod "network-metrics-daemon-qvlbd" (UID: "294a4484-da93-4c37-9ecf-18f68f4ad64d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.325317 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.333662 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nsgx\" (UniqueName: \"kubernetes.io/projected/294a4484-da93-4c37-9ecf-18f68f4ad64d-kube-api-access-9nsgx\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.338954 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.363158 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.378524 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.394329 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.394715 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.394774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.394792 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.394815 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.394832 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.420124 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262686 6064 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262891 6064 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.263707 6064 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 09:29:58.263780 6064 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 09:29:58.263810 6064 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 09:29:58.263848 6064 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 09:29:58.263946 6064 factory.go:656] Stopping watch factory\\\\nI0929 09:29:58.263982 6064 ovnkube.go:599] Stopped ovnkube\\\\nI0929 09:29:58.263866 6064 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 09:29:58.263881 6064 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 09:29:58.264033 6064 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 
09:29:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 
09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.435485 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.448357 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.460629 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.472088 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.482820 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.491457 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:02Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.497287 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.497329 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.497341 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.497359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.497372 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.599834 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.599894 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.599929 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.599948 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.599959 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.703786 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.703858 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.703882 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.703943 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.703970 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.713375 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.713425 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:02 crc kubenswrapper[4779]: E0929 09:30:02.713575 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:02 crc kubenswrapper[4779]: E0929 09:30:02.713666 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.806737 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.806820 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.806841 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.806864 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.806881 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.814440 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:02 crc kubenswrapper[4779]: E0929 09:30:02.814643 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:02 crc kubenswrapper[4779]: E0929 09:30:02.814766 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs podName:294a4484-da93-4c37-9ecf-18f68f4ad64d nodeName:}" failed. No retries permitted until 2025-09-29 09:30:03.814740741 +0000 UTC m=+35.796064685 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs") pod "network-metrics-daemon-qvlbd" (UID: "294a4484-da93-4c37-9ecf-18f68f4ad64d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.910372 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.910441 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.910463 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.910490 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:02 crc kubenswrapper[4779]: I0929 09:30:02.910513 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:02Z","lastTransitionTime":"2025-09-29T09:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.014214 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.014271 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.014284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.014305 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.014319 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.118045 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.118099 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.118154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.118181 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.118200 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.224373 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.224428 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.224443 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.224462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.224479 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.328210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.328686 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.328708 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.328732 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.328749 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.431647 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.431719 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.431739 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.431763 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.431780 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.534476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.534531 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.534549 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.534575 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.534597 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.638231 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.638295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.638314 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.638339 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.638357 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.713202 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:03 crc kubenswrapper[4779]: E0929 09:30:03.713442 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.713510 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:03 crc kubenswrapper[4779]: E0929 09:30:03.713669 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.741277 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.741335 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.741358 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.741392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.741414 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.824234 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:03 crc kubenswrapper[4779]: E0929 09:30:03.824485 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:03 crc kubenswrapper[4779]: E0929 09:30:03.824552 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs podName:294a4484-da93-4c37-9ecf-18f68f4ad64d nodeName:}" failed. No retries permitted until 2025-09-29 09:30:05.824528945 +0000 UTC m=+37.805852879 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs") pod "network-metrics-daemon-qvlbd" (UID: "294a4484-da93-4c37-9ecf-18f68f4ad64d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.844470 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.844524 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.844542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.844566 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.844589 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.948366 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.948464 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.948489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.948518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:03 crc kubenswrapper[4779]: I0929 09:30:03.948537 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:03Z","lastTransitionTime":"2025-09-29T09:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.051518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.051590 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.051617 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.051650 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.051673 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.155550 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.155624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.155643 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.155668 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.155689 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.259267 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.259319 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.259330 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.259350 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.259364 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.362749 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.362827 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.362851 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.362881 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.362930 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.465898 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.465989 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.466005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.466025 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.466038 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.533526 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.533719 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.533762 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.533938 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.533946 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:30:20.533846985 +0000 UTC m=+52.515170969 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.534015 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:20.533995289 +0000 UTC m=+52.515319213 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.534122 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.534213 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:20.534189155 +0000 UTC m=+52.515513269 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.569541 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.569623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.569643 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.569679 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.569701 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.634674 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.634740 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.634937 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.634958 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.634971 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.635033 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:20.63501814 +0000 UTC m=+52.616342044 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.635074 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.635129 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.635152 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.635244 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:20.635214426 +0000 UTC m=+52.616538370 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.673656 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.673738 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.673765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.673801 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.673832 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.713714 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.713737 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.714044 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:04 crc kubenswrapper[4779]: E0929 09:30:04.714221 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.777558 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.777762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.777836 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.777862 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.777879 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.882585 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.882664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.882684 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.882713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.882736 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.985580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.985645 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.985664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.985694 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:04 crc kubenswrapper[4779]: I0929 09:30:04.985714 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:04Z","lastTransitionTime":"2025-09-29T09:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.088628 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.088711 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.088736 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.088773 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.088807 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.193088 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.193167 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.193186 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.193213 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.193238 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.297115 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.297189 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.297214 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.297246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.297274 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.400558 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.400605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.400622 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.400645 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.400661 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.504356 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.504446 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.504464 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.504494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.504514 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.608775 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.608851 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.608870 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.608933 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.608957 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.712859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.712959 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.712977 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.713042 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.713062 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.713269 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:05 crc kubenswrapper[4779]: E0929 09:30:05.713388 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.713278 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:05 crc kubenswrapper[4779]: E0929 09:30:05.713646 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.816429 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.816486 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.816504 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.816532 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.816551 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.853685 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:05 crc kubenswrapper[4779]: E0929 09:30:05.853998 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 09:30:05 crc kubenswrapper[4779]: E0929 09:30:05.854099 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs podName:294a4484-da93-4c37-9ecf-18f68f4ad64d nodeName:}" failed. No retries permitted until 2025-09-29 09:30:09.854069023 +0000 UTC m=+41.835392947 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs") pod "network-metrics-daemon-qvlbd" (UID: "294a4484-da93-4c37-9ecf-18f68f4ad64d") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.920781 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.920845 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.920858 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.920882 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:05 crc kubenswrapper[4779]: I0929 09:30:05.920898 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:05Z","lastTransitionTime":"2025-09-29T09:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.023692 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.023740 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.023753 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.023773 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.023782 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.128276 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.128357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.128396 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.128435 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.128460 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.234418 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.234507 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.234534 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.234574 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.234605 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.338054 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.338123 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.338140 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.338167 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.338184 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.442177 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.442272 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.442328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.442365 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.442390 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.546109 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.546188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.546212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.546248 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.546274 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.649621 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.649676 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.649685 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.649703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.649715 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.713954 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.714031 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 09:30:06 crc kubenswrapper[4779]: E0929 09:30:06.714251 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 09:30:06 crc kubenswrapper[4779]: E0929 09:30:06.714372 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.752940 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.753043 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.753061 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.753095 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.753121 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.857107 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.857186 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.857212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.857279 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.857300 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.961036 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.961143 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.961174 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.961248 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:06 crc kubenswrapper[4779]: I0929 09:30:06.961277 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:06Z","lastTransitionTime":"2025-09-29T09:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.065325 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.065375 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.065390 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.065415 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.065428 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.169643 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.169737 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.169763 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.169796 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.169820 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.272721 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.272781 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.272802 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.272825 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.272851 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.376401 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.376498 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.376524 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.376560 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.376585 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.485354 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.485392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.485402 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.485417 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.485426 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.588498 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.588556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.588573 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.588598 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.588617 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.692438 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.692519 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.692540 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.692578 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.692608 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.714102 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.714305 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:07 crc kubenswrapper[4779]: E0929 09:30:07.714370 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d"
Sep 29 09:30:07 crc kubenswrapper[4779]: E0929 09:30:07.714621 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.799200 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.799628 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.799795 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.799957 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.800083 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.903837 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.903889 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.903940 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.903964 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:07 crc kubenswrapper[4779]: I0929 09:30:07.903984 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:07Z","lastTransitionTime":"2025-09-29T09:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.006996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.007052 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.007069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.007092 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.007110 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.110136 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.110209 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.110231 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.110257 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.110277 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.213477 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.213547 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.213570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.213599 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.213621 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.317558 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.317636 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.317662 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.317694 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.317714 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.373893 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.373971 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.373982 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.374002 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.374015 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:08 crc kubenswrapper[4779]: E0929 09:30:08.389163 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.392815 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.392855 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.392870 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.392892 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.392924 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:08 crc kubenswrapper[4779]: E0929 09:30:08.406538 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.411042 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.411252 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.411399 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.411576 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.411740 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: E0929 09:30:08.425835 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.429642 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.429679 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.429688 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.429705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.429714 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: E0929 09:30:08.442264 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.446401 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.446436 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.446447 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.446465 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.446478 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: E0929 09:30:08.460398 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: E0929 09:30:08.460899 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.462736 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.462859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.463005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.463139 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.463251 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.565861 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.565940 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.565956 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.565978 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.565993 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.670427 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.670470 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.670481 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.670499 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.670511 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.714051 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:08 crc kubenswrapper[4779]: E0929 09:30:08.714180 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.714323 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:08 crc kubenswrapper[4779]: E0929 09:30:08.714495 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.734090 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io
\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.752600 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.769083 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.772418 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.772476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.772495 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.772519 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.772536 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.784821 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.802322 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.823017 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.841364 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.870747 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.875094 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.875169 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.875195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.875228 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.875255 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.889164 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.906730 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.938640 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://577276f8e790a07af7c396bfdfef2adc46c85d04eb3948f00aceef15e4124988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:58Z\\\",\\\"message\\\":\\\"from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262686 6064 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.262891 6064 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 09:29:58.263707 6064 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 09:29:58.263780 6064 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 09:29:58.263810 6064 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 09:29:58.263848 6064 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 09:29:58.263946 6064 factory.go:656] Stopping watch factory\\\\nI0929 09:29:58.263982 6064 ovnkube.go:599] Stopped ovnkube\\\\nI0929 09:29:58.263866 6064 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 09:29:58.263881 6064 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 09:29:58.264033 6064 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0929 09:29:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.957548 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.973028 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.977289 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.977368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.977398 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.977431 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.977455 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:08Z","lastTransitionTime":"2025-09-29T09:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
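Separately from the webhook problem, the node keeps reporting NotReady because the container runtime finds no CNI configuration. The check implied by the repeated message can be approximated with a short sketch; the directory path comes straight from the log, while treating .conf, .conflist and .json as configuration files is an assumption mirroring CNI's usual loading rules (on this cluster the file would normally be written by the ovnkube-node pod seen earlier once it is healthy):

// cnicheck.go: list candidate CNI config files in the directory named in the
// kubelet's NetworkPluginNotReady message. An empty result corresponds to the
// "no CNI configuration file" condition that keeps the node NotReady.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d" // path taken from the log message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // assumed config extensions
			fmt.Println("config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found")
	}
}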
Has your network provider started?"} Sep 29 09:30:08 crc kubenswrapper[4779]: I0929 09:30:08.994803 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:08Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.010646 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:09Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.027470 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:09Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.042602 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:09Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.079891 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.079977 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.079990 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.080035 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.080048 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.184075 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.184212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.184233 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.184257 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.184275 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.286782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.287793 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.287980 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.288219 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.288383 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.391266 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.391525 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.391642 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.391765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.391898 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.494768 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.494831 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.494849 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.494873 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.494890 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.598127 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.598182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.598194 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.598210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.598221 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.701494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.701570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.701592 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.701624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.701645 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.713203 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.713219 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:09 crc kubenswrapper[4779]: E0929 09:30:09.713404 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:09 crc kubenswrapper[4779]: E0929 09:30:09.713562 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.805290 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.805352 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.805370 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.805397 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.805416 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.908163 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.908264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.908284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.908312 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.908330 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:09Z","lastTransitionTime":"2025-09-29T09:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:09 crc kubenswrapper[4779]: I0929 09:30:09.914753 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:09 crc kubenswrapper[4779]: E0929 09:30:09.914888 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:09 crc kubenswrapper[4779]: E0929 09:30:09.914969 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs podName:294a4484-da93-4c37-9ecf-18f68f4ad64d nodeName:}" failed. No retries permitted until 2025-09-29 09:30:17.914955065 +0000 UTC m=+49.896278959 (durationBeforeRetry 8s). 
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.011278 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.011319 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.011328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.011343 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.011353 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.114382 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.114459 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.114485 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.114517 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.114541 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.217807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.217865 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.217878 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.217898 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.217949 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.321133 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.321223 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.321262 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.321293 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.321314 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.424653 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.424707 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.424723 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.424748 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.424765 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.527640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.527705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.527726 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.527755 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.527776 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.632281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.632380 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.632401 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.632427 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.632447 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.713727 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.713774 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:10 crc kubenswrapper[4779]: E0929 09:30:10.713977 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:10 crc kubenswrapper[4779]: E0929 09:30:10.714163 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.736938 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.736974 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.736985 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.737003 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.737015 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.839684 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.839755 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.839772 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.839805 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.839824 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.943515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.943612 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.943660 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.943694 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.943713 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:10Z","lastTransitionTime":"2025-09-29T09:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.991053 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:30:10 crc kubenswrapper[4779]: I0929 09:30:10.992357 4779 scope.go:117] "RemoveContainer" containerID="97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.010450 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.030309 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.046686 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.047289 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.047325 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.047357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.047377 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.053765 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.076296 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.095188 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.116185 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.145058 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.150507 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.150580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.150598 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.150647 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.150667 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.166605 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.182193 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.195888 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.219792 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc
10ed1cadaa7f44f95cc17741\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.239938 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.253177 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.253223 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.253233 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.253250 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.253261 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.256693 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.268882 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.278556 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.287890 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.298472 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.355621 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.355679 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.355692 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.355713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.355726 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.457953 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.457988 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.458000 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.458015 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.458025 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.560613 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.560665 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.560679 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.560697 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.561086 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.664036 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.664077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.664087 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.664103 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.664115 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.713687 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.713697 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:11 crc kubenswrapper[4779]: E0929 09:30:11.713811 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:11 crc kubenswrapper[4779]: E0929 09:30:11.714060 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.766505 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.766550 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.766560 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.766576 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.766586 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.869211 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.869254 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.869266 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.869284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.869296 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.971838 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.971895 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.971925 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.971943 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:11 crc kubenswrapper[4779]: I0929 09:30:11.971952 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:11Z","lastTransitionTime":"2025-09-29T09:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.010077 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/2.log" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.010956 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/1.log" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.014685 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432" exitCode=1 Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.014732 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.014777 4779 scope.go:117] "RemoveContainer" containerID="97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.016067 4779 scope.go:117] "RemoveContainer" containerID="c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432" Sep 29 09:30:12 crc kubenswrapper[4779]: E0929 09:30:12.016436 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.033765 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.056972 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.074720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.074808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.074822 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.074842 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.074858 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.077587 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.092402 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.106747 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.123185 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.139489 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.161527 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.174655 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.177724 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.177765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.177778 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.177796 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.177808 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.197665 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418f
acef4d9e5cf4ff1ea6db6432\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\
":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.210520 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 
09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.245845 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.262768 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.275352 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.281417 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.281495 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.281519 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.281556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.281584 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.287586 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"start
Time\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.299870 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.316302 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:12Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.385240 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.385283 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.385296 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.385316 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.385332 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no 
CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.488696 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.488756 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.488777 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.488804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.488824 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.592030 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.592127 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.592151 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.592185 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.592209 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.695536 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.695574 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.695583 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.695597 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.695607 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.713213 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.713269 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:12 crc kubenswrapper[4779]: E0929 09:30:12.713312 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:12 crc kubenswrapper[4779]: E0929 09:30:12.713589 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.799503 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.799598 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.799621 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.799650 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.799668 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.903465 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.903549 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.903572 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.903605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:12 crc kubenswrapper[4779]: I0929 09:30:12.903628 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:12Z","lastTransitionTime":"2025-09-29T09:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.007881 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.008048 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.008079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.008125 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.008154 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.022842 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/2.log" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.112394 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.112468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.112489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.112703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.112725 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.216640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.216702 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.216725 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.216753 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.216775 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.320057 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.320139 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.320191 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.320216 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.320228 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.423976 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.424052 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.424076 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.424111 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.424133 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.527472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.527560 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.527578 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.527611 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.527630 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.632142 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.632217 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.632243 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.632281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.632307 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.713801 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:13 crc kubenswrapper[4779]: E0929 09:30:13.714004 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.714423 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:13 crc kubenswrapper[4779]: E0929 09:30:13.714715 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.736334 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.736449 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.736472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.736543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.736562 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.841326 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.841421 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.841446 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.841489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.841519 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.945189 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.945227 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.945238 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.945259 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:13 crc kubenswrapper[4779]: I0929 09:30:13.945272 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:13Z","lastTransitionTime":"2025-09-29T09:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.048290 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.048341 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.048356 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.048380 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.048395 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.150728 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.150782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.150794 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.150817 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.150841 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.254805 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.254896 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.254947 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.254981 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.255005 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.358682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.358746 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.358763 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.358789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.358810 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.463108 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.463224 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.463250 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.463280 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.463343 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.567564 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.567641 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.567662 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.567687 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.567707 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.671664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.671730 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.671759 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.671792 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.671815 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.713975 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:14 crc kubenswrapper[4779]: E0929 09:30:14.714159 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.714251 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:14 crc kubenswrapper[4779]: E0929 09:30:14.714452 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.774874 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.774978 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.774997 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.775022 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.775040 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.877942 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.878008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.878027 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.878052 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.878069 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.981264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.981323 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.981340 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.981365 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:14 crc kubenswrapper[4779]: I0929 09:30:14.981382 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:14Z","lastTransitionTime":"2025-09-29T09:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.084379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.084434 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.084453 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.084481 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.084503 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.187782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.187844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.187861 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.187886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.187930 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.291378 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.291432 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.291448 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.291471 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.291490 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.394414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.394446 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.394456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.394468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.394477 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.497979 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.498024 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.498037 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.498053 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.498062 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.601650 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.601719 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.601738 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.601762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.601779 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.704651 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.704735 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.704756 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.704789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.704811 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.714016 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:15 crc kubenswrapper[4779]: E0929 09:30:15.714155 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.714197 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:15 crc kubenswrapper[4779]: E0929 09:30:15.714347 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.807672 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.807705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.807713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.807727 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.807753 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.910350 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.910379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.910387 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.910401 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:15 crc kubenswrapper[4779]: I0929 09:30:15.910411 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:15Z","lastTransitionTime":"2025-09-29T09:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.013349 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.013414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.013431 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.013454 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.013470 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.117268 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.117331 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.117349 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.117374 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.117392 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.220556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.221046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.221184 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.221322 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.221456 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.324338 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.324664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.324882 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.325104 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.325290 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.429080 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.429111 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.429120 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.429133 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.429142 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.531960 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.532030 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.532049 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.532074 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.532091 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.635356 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.635408 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.635420 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.635439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.635454 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.713316 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.713341 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:16 crc kubenswrapper[4779]: E0929 09:30:16.713566 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:16 crc kubenswrapper[4779]: E0929 09:30:16.713463 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.738399 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.738463 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.738484 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.738511 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.738530 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.841481 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.841534 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.841546 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.841563 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.841574 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.944022 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.944085 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.944104 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.944129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:16 crc kubenswrapper[4779]: I0929 09:30:16.944146 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:16Z","lastTransitionTime":"2025-09-29T09:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.046662 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.046695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.046703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.046716 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.046725 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.149596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.149638 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.149652 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.149672 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.149684 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.252137 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.252202 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.252275 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.252302 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.252322 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.355688 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.355780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.355808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.355840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.355866 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.459673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.459766 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.459790 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.459825 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.459855 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.564285 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.564372 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.564396 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.564427 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.564450 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.668179 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.668242 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.668260 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.668295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.668316 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.713972 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.713986 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:17 crc kubenswrapper[4779]: E0929 09:30:17.714196 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:17 crc kubenswrapper[4779]: E0929 09:30:17.714370 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.772220 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.772292 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.772316 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.772348 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.772373 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.875561 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.875627 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.875644 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.875668 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.875685 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.922529 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:17 crc kubenswrapper[4779]: E0929 09:30:17.922829 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:17 crc kubenswrapper[4779]: E0929 09:30:17.922983 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs podName:294a4484-da93-4c37-9ecf-18f68f4ad64d nodeName:}" failed. No retries permitted until 2025-09-29 09:30:33.922954833 +0000 UTC m=+65.904278777 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs") pod "network-metrics-daemon-qvlbd" (UID: "294a4484-da93-4c37-9ecf-18f68f4ad64d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.979299 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.979693 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.979734 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.979755 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:17 crc kubenswrapper[4779]: I0929 09:30:17.979768 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:17Z","lastTransitionTime":"2025-09-29T09:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.087135 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.087194 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.087212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.087235 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.087263 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.190729 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.190779 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.190795 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.190818 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.190836 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.294007 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.294105 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.294132 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.294174 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.294203 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.398098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.398480 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.398638 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.398985 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.399145 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.503097 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.503158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.503174 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.503199 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.503220 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.569451 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.569510 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.569528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.569557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.569579 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: E0929 09:30:18.593056 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.598697 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.598764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.598783 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.598808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.598826 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: E0929 09:30:18.620420 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.626115 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.626191 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.626211 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.626237 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.626256 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: E0929 09:30:18.646960 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.652837 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.652972 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.653005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.653036 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.653059 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: E0929 09:30:18.677260 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.682830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.682872 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.682889 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.682941 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.682960 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: E0929 09:30:18.703595 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: E0929 09:30:18.703952 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.706225 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.706290 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.706315 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.706343 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.706362 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.713678 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:18 crc kubenswrapper[4779]: E0929 09:30:18.713886 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.715215 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:18 crc kubenswrapper[4779]: E0929 09:30:18.717843 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.749842 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f
265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.772093 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.791089 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.809476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.809562 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.809587 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.809619 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.809642 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.826328 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 
09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", 
Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.846132 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.860765 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.875847 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.888837 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.905554 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.913812 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.913975 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.914009 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.914040 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.914071 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:18Z","lastTransitionTime":"2025-09-29T09:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.921740 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.944377 4779 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab29974
91caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.965628 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:18 crc kubenswrapper[4779]: I0929 09:30:18.988234 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:18Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.009375 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:19Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.017698 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.017753 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.017772 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.017798 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.017816 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.030054 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:19Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.047032 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:19Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.070004 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T09:30:19Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.121246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.121300 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.121318 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.121340 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.121357 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.225385 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.225452 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.225475 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.225506 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.225530 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
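
[Annotation] The webhook failures above all reject the same serving certificate: the node clock reads 2025-09-29 while the certificate for https://127.0.0.1:9743 expired 2025-08-24, which is typical when a CRC VM resumes from a snapshot taken after its rotated certificates lapsed. A minimal Go sketch for confirming the validity window from the node (the address and port come from the log; reachability from where the sketch runs is an assumption):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Dial the webhook endpoint named in the log. Verification is skipped
	// only so the expired certificate can be read; nothing is trusted.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("NotBefore:", cert.NotBefore.Format(time.RFC3339))
	fmt.Println("NotAfter: ", cert.NotAfter.Format(time.RFC3339))
	fmt.Println("expired:  ", time.Now().After(cert.NotAfter)) // matches the x509 check the kubelet reports
}
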
Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.330248 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.330325 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.330344 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.330370 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.330389 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.433496 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.433566 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.433584 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.433608 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.433624 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.537119 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.537174 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.537190 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.537212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.537257 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.640639 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.640716 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.640735 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.640762 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.640783 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.713471 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.713573 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:19 crc kubenswrapper[4779]: E0929 09:30:19.713679 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:19 crc kubenswrapper[4779]: E0929 09:30:19.713839 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
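
[Annotation] Every NodeNotReady condition in this window carries the same cause: the container runtime reports NetworkReady=false because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/. A sketch of that readiness gate, assuming the runtime accepts the usual *.conf/*.conflist/*.json configuration names (the directory is taken from the log message):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("NetworkReady=false:", err)
		return
	}
	// The runtime only needs one usable CNI config to report NetworkReady.
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("NetworkReady=true, found", e.Name())
			return
		}
	}
	fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
}
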
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.744045 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.744095 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.744111 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.744136 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.744153 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.846896 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.847050 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.847072 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.847102 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.847155 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.950128 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.950205 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.950228 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.950263 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:19 crc kubenswrapper[4779]: I0929 09:30:19.950286 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:19Z","lastTransitionTime":"2025-09-29T09:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.052380 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.052441 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.052457 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.052483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.052500 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.155384 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.155446 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.155474 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.155505 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.155527 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.257955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.258015 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.258027 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.258045 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.258054 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.361339 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.361412 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.361433 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.361459 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.361479 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.464173 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.464243 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.464262 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.464291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.464309 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.552715 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.552855 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.553005 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:30:52.552968275 +0000 UTC m=+84.534292219 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.553015 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.553047 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.553113 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.553171 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:52.553155511 +0000 UTC m=+84.534479425 (durationBeforeRetry 32s). 
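
[Annotation] The "No retries permitted until ... (durationBeforeRetry 32s)" lines reflect the exponential backoff that nestedpendingoperations applies to failed volume operations; 32s is consistent with a 500ms initial delay doubling on each failure, though those exact constants are an assumption here. A sketch of that schedule:

package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initial  = 500 * time.Millisecond // assumed initial delay
		factor   = 2                      // assumed doubling factor
		maxDelay = 2*time.Minute + 2*time.Second
	)
	d := initial
	for i := 0; i < 10; i++ {
		fmt.Printf("failure %d: durationBeforeRetry %v\n", i+1, d)
		d *= factor
		if d > maxDelay {
			d = maxDelay // backoff is capped rather than unbounded
		}
	}
	// failure 7 prints 32s, matching the retry windows in the log above.
}
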
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.553192 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:52.553182042 +0000 UTC m=+84.534505956 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.566875 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.566969 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.567000 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.567033 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.567057 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.654039 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.654180 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.654329 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.654374 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.654379 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.654396 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.654411 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.654430 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.654501 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:52.65446285 +0000 UTC m=+84.635786794 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.654550 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 09:30:52.654529252 +0000 UTC m=+84.635853186 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.670257 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.670308 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.670326 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.670350 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.670374 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.713192 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.713303 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.713540 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:20 crc kubenswrapper[4779]: E0929 09:30:20.713677 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.771460 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.773080 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.773140 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.773158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.773203 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.773220 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.791584 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.807063 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.828481 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.847453 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.876844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.876954 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.876983 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.877016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.877040 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.880821 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97748c7b388a564dd18c6e13387163fc397c49cc10ed1cadaa7f44f95cc17741\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"message\\\":\\\"de crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:29:59Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:29:59.880430 6181 services_controller.go:451] Built service openshift-machine-api/control-plane-machine-set-operator cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0929 09:29:59.880447 6181 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nI0929 09:29:59.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.901433 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.921530 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.943298 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.959859 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.974315 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.981133 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.981184 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.981203 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.981228 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.981247 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:20Z","lastTransitionTime":"2025-09-29T09:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:20 crc kubenswrapper[4779]: I0929 09:30:20.988769 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:20Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.003510 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:21Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.023127 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:21Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.044646 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:21Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.068085 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:21Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.081565 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:21Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.084016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.084055 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.084066 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.084084 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.084095 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.096201 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:21Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.148059 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:21Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.185924 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.185963 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.185974 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.185991 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.186006 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.287881 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.287959 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.287976 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.287998 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.288010 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.389858 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.389935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.389949 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.389965 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.389977 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.491916 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.491995 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.492007 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.492024 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.492038 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.594577 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.594615 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.594626 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.594639 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.594648 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.696554 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.696619 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.696640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.696668 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.696686 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.713764 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:21 crc kubenswrapper[4779]: E0929 09:30:21.713897 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.713771 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:21 crc kubenswrapper[4779]: E0929 09:30:21.714117 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.799084 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.799139 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.799152 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.799171 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.799184 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.902397 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.902481 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.902500 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.902525 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:21 crc kubenswrapper[4779]: I0929 09:30:21.902544 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:21Z","lastTransitionTime":"2025-09-29T09:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.004397 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.004440 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.004451 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.004469 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.004480 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.107043 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.107076 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.107085 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.107098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.107106 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.209255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.209298 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.209313 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.209333 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.209349 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.312377 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.312426 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.312435 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.312455 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.312464 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.415207 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.415238 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.415246 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.415263 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.415275 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.517253 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.517291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.517299 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.517315 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.517343 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.619874 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.619928 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.619939 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.619955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.619966 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.713830 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.713886 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:22 crc kubenswrapper[4779]: E0929 09:30:22.714123 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:22 crc kubenswrapper[4779]: E0929 09:30:22.714254 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.721825 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.721863 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.721873 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.721918 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.721954 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.824605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.824644 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.824678 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.824693 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.824702 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.928251 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.928322 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.928355 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.928384 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:22 crc kubenswrapper[4779]: I0929 09:30:22.928406 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:22Z","lastTransitionTime":"2025-09-29T09:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.030714 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.030770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.030809 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.030843 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.030865 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.133374 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.133434 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.133472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.133507 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.133531 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.236544 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.236594 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.236605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.236624 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.236636 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.340090 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.340158 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.340182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.340213 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.340234 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.443069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.443135 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.443154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.443181 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.443198 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.546220 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.546314 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.546332 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.546356 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.546376 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.649719 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.649786 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.649808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.649837 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.649860 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.713350 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.713415 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:23 crc kubenswrapper[4779]: E0929 09:30:23.713592 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:23 crc kubenswrapper[4779]: E0929 09:30:23.713676 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.752942 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.753005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.753018 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.753035 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.753048 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.855425 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.855493 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.855515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.855547 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.855571 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.957987 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.958021 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.958029 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.958042 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:23 crc kubenswrapper[4779]: I0929 09:30:23.958056 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:23Z","lastTransitionTime":"2025-09-29T09:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.061844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.061963 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.061982 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.062008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.062029 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.164704 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.164774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.164796 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.164826 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.164847 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.267084 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.267122 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.267134 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.267150 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.267161 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.370079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.370156 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.370182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.370211 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.370233 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.473491 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.473530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.473539 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.473554 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.473562 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.576770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.576836 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.576853 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.576878 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.576895 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.679788 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.679841 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.679857 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.679879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.679896 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.713776 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.713839 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:24 crc kubenswrapper[4779]: E0929 09:30:24.714034 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:24 crc kubenswrapper[4779]: E0929 09:30:24.714147 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.783960 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.784026 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.784043 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.784104 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.784134 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.887125 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.887192 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.887212 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.887238 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.887256 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.989478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.989537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.989555 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.989581 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:24 crc kubenswrapper[4779]: I0929 09:30:24.989602 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:24Z","lastTransitionTime":"2025-09-29T09:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.092478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.092555 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.092573 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.092598 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.092616 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.195748 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.195884 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.195950 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.195991 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.196017 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.299780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.299821 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.299830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.299844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.299854 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.402857 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.402963 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.402981 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.403005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.403021 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.505765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.505841 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.505864 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.505896 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.505977 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.608882 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.608996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.609022 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.609052 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.609075 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.712702 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.712777 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.712800 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.712832 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.712854 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.713301 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.713422 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:25 crc kubenswrapper[4779]: E0929 09:30:25.713465 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:25 crc kubenswrapper[4779]: E0929 09:30:25.713649 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.816631 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.816714 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.816743 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.816775 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.816802 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.919799 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.919867 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.919884 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.919933 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:25 crc kubenswrapper[4779]: I0929 09:30:25.919951 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:25Z","lastTransitionTime":"2025-09-29T09:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.023113 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.023185 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.023202 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.023227 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.023246 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.127718 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.127838 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.127869 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.127901 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.127956 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.231115 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.231176 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.231194 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.231221 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.231239 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.334136 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.334217 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.334244 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.334276 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.334294 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.436978 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.437032 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.437050 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.437077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.437096 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.539176 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.539231 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.539248 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.539271 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.539304 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.641921 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.641970 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.641980 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.641995 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.642006 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.714182 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:26 crc kubenswrapper[4779]: E0929 09:30:26.714420 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.714456 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:26 crc kubenswrapper[4779]: E0929 09:30:26.715182 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.715972 4779 scope.go:117] "RemoveContainer" containerID="c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432" Sep 29 09:30:26 crc kubenswrapper[4779]: E0929 09:30:26.716284 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.740252 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.744415 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.744478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.744505 4779 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.744534 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.744555 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.756098 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.771344 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.788813 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.809327 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.828103 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.847987 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.848050 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.848072 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.848100 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.848121 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.849715 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.868945 4779 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.884064 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.900048 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.916200 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.949266 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.951172 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.951219 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.951235 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.951258 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.951275 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:26Z","lastTransitionTime":"2025-09-29T09:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.962583 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:26 crc kubenswrapper[4779]: I0929 09:30:26.978552 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:26Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.008662 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:27Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.027007 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:27Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.045619 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:27Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.055005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.055063 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.055088 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.055122 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.055144 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:27Z","lastTransitionTime":"2025-09-29T09:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.066010 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:27Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.158080 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.158135 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.158147 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.158165 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.158180 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:27Z","lastTransitionTime":"2025-09-29T09:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.716335 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:27 crc kubenswrapper[4779]: E0929 09:30:27.716584 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.716644 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:27 crc kubenswrapper[4779]: E0929 09:30:27.716881 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.777305 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.777360 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.777375 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.777396 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:27 crc kubenswrapper[4779]: I0929 09:30:27.777412 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:27Z","lastTransitionTime":"2025-09-29T09:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.713298 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:28 crc kubenswrapper[4779]: E0929 09:30:28.713497 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.713861 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:28 crc kubenswrapper[4779]: E0929 09:30:28.714025 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:28 crc kubenswrapper[4779]: E0929 09:30:28.735089 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.738374 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.741309 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.741698 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.741899 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.742104 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.742250 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:28Z","lastTransitionTime":"2025-09-29T09:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:28 crc kubenswrapper[4779]: E0929 09:30:28.756872 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.758626 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.762615 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.762663 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.762682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.762706 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.762725 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:28Z","lastTransitionTime":"2025-09-29T09:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:28 crc kubenswrapper[4779]: E0929 09:30:28.782936 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.784497 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.788542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.788575 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:28 crc 
kubenswrapper[4779]: I0929 09:30:28.788585 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.788609 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.788623 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:28Z","lastTransitionTime":"2025-09-29T09:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.805276 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: E0929 09:30:28.805525 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.812705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.812804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.812820 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.812837 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.812849 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:28Z","lastTransitionTime":"2025-09-29T09:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.830863 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: E0929 09:30:28.839569 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: E0929 09:30:28.839743 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.840773 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.840816 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.840829 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.840848 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.840858 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:28Z","lastTransitionTime":"2025-09-29T09:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.857294 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"nam
e\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.879068 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.902983 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.920135 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.937693 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.944671 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.944731 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.944746 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.944769 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.944785 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:28Z","lastTransitionTime":"2025-09-29T09:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.965120 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418f
acef4d9e5cf4ff1ea6db6432\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.979499 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:28 crc kubenswrapper[4779]: I0929 09:30:28.994077 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:28Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.007878 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:29Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.022305 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:29Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.036445 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:29Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.047709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.047750 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.047763 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.047782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.047795 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.052072 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:29Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.067206 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:29Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.149838 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.149877 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.149889 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.149909 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.149937 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.252020 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.252064 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.252076 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.252095 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.252108 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.356653 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.357297 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.357383 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.357462 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.357524 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.460374 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.460421 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.460433 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.460459 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.460471 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.563376 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.563438 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.563457 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.563484 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.563503 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.667384 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.667452 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.667474 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.667508 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.667531 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
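[Editor's note] The Ready=False condition repeated in these entries carries its own diagnosis: the kubelet mirrors the runtime's NetworkReady status, which stays false until a CNI network config exists on disk. A rough, self-contained sketch of that readiness test (a local approximation, not CRI-O's implementation; the directory is taken from the log message, and the accepted extensions follow the usual CNI config conventions):

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // Directory named in the repeated log message.
        dir := "/etc/kubernetes/cni/net.d"
        var found []string
        // Extensions conventionally accepted for CNI network configs.
        for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
            matches, err := filepath.Glob(filepath.Join(dir, pat))
            if err != nil {
                continue // only possible with a malformed pattern
            }
            found = append(found, matches...)
        }
        if len(found) == 0 {
            fmt.Println("NetworkReady=false: no CNI configuration file in", dir)
            os.Exit(1)
        }
        fmt.Println("CNI configs present:", found)
    }

Once the network provider writes a config file into that directory, the condition should flip back to Ready=True on a subsequent heartbeat.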
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.714193 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.714224 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:29 crc kubenswrapper[4779]: E0929 09:30:29.714410 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:30:29 crc kubenswrapper[4779]: E0929 09:30:29.714551 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.770655 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.770753 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.770777 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.770794 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.770807 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.873499 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.873596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.873642 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.873746 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.873768 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.977033 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.977095 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.977112 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.977137 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:29 crc kubenswrapper[4779]: I0929 09:30:29.977154 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:29Z","lastTransitionTime":"2025-09-29T09:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.080461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.080518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.080540 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.080563 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.080579 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.184252 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.184311 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.184328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.184356 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.184373 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.287261 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.287295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.287304 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.287321 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.287331 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.390365 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.390476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.390495 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.390522 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.390542 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.493678 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.493735 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.493747 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.493764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.493778 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.596990 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.597080 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.597129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.597161 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.597178 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.700633 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.700688 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.700704 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.700727 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.700764 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.713804 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.713902 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:30 crc kubenswrapper[4779]: E0929 09:30:30.713982 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:30 crc kubenswrapper[4779]: E0929 09:30:30.714063 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.802890 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.803010 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.803030 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.803053 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.803071 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.905957 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.905990 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.905998 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.906075 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:30 crc kubenswrapper[4779]: I0929 09:30:30.906092 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:30Z","lastTransitionTime":"2025-09-29T09:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.009371 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.009479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.009499 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.009525 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.009545 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.112701 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.112778 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.112802 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.112840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.112864 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.215488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.215551 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.215570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.215596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.215614 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.318543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.318593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.318603 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.318621 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.318631 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.421576 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.421616 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.421625 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.421639 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.421648 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.524478 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.524525 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.524539 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.524557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.524568 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.627682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.627735 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.627751 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.627773 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.627789 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.713514 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.713583 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:31 crc kubenswrapper[4779]: E0929 09:30:31.713750 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
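[Editor's note] The paired util.go and pod_workers.go entries show the order of operations: the kubelet wants to start a new sandbox for each of these pods, but the sync is skipped while the runtime network is not ready. Host-network pods (node-resolver, network-operator above) keep running; only pods that need CNI wiring (networking-console-plugin, network-metrics-daemon, the network-check pair) are requeued. A self-contained sketch of that gate under those assumptions (local types for illustration, not kubelet internals):

    package main

    import (
        "errors"
        "fmt"
    )

    type pod struct {
        name        string
        hostNetwork bool
    }

    var errNetworkNotReady = errors.New(
        "network is not ready: container runtime network not ready: NetworkReady=false")

    // syncPod refuses sandbox creation for non-host-network pods while the
    // runtime still reports NetworkReady=false; the kubelet logs the error
    // and retries on a later sync.
    func syncPod(p pod, networkReady bool) error {
        if !networkReady && !p.hostNetwork {
            return errNetworkNotReady
        }
        return nil // would go on to create the sandbox
    }

    func main() {
        for _, p := range []pod{
            {"openshift-multus/network-metrics-daemon-qvlbd", false},
            {"openshift-dns/node-resolver-r5584", true},
        } {
            if err := syncPod(p, false); err != nil {
                fmt.Printf("Error syncing pod, skipping: %v pod=%q\n", err, p.name)
            } else {
                fmt.Printf("pod %q proceeds (host network)\n", p.name)
            }
        }
    }

This matches the pattern in the surrounding entries: the same two non-host-network pods are skipped on every retry, roughly once per second, until the CNI config appears.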
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:31 crc kubenswrapper[4779]: E0929 09:30:31.714010 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.730573 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.730646 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.730669 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.730697 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.730719 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.833423 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.833452 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.833461 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.833476 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.833484 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.939016 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.939073 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.939090 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.939112 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:31 crc kubenswrapper[4779]: I0929 09:30:31.939130 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:31Z","lastTransitionTime":"2025-09-29T09:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.041523 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.041570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.041586 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.041610 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.041626 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.143757 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.143816 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.143838 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.143866 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.143889 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.246995 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.247038 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.247050 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.247068 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.247080 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.350027 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.350070 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.350098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.350113 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.350123 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.453399 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.453459 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.453483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.453511 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.453528 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.556808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.556864 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.556879 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.556926 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.556944 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.660048 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.660114 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.660129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.660148 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.660164 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.714126 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 09:30:32 crc kubenswrapper[4779]: E0929 09:30:32.714325 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.714377 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 09:30:32 crc kubenswrapper[4779]: E0929 09:30:32.714542 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.763488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.763530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.763542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.763559 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.763570 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.865886 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.865960 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.865975 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.865996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.866010 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.968480 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.968520 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.968533 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.968552 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:32 crc kubenswrapper[4779]: I0929 09:30:32.968562 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:32Z","lastTransitionTime":"2025-09-29T09:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.071315 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.071353 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.071362 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.071378 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.071388 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.174027 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.174065 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.174076 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.174093 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.174104 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.276073 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.276112 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.276122 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.276154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.276194 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.378152 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.378185 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.378215 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.378230 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.378239 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.480584 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.480669 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.480682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.480699 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.480710 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.582959 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.582999 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.583010 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.583026 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.583037 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.685513 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.685554 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.685566 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.685583 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.685597 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.713696 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.713747 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:33 crc kubenswrapper[4779]: E0929 09:30:33.713814 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:30:33 crc kubenswrapper[4779]: E0929 09:30:33.713963 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.787381 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.787415 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.787425 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.787439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.787450 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.890484 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.890519 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.890528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.890544 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.890554 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.992643 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.992689 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.992703 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.992721 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:33 crc kubenswrapper[4779]: I0929 09:30:33.992735 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:33Z","lastTransitionTime":"2025-09-29T09:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.011566 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:34 crc kubenswrapper[4779]: E0929 09:30:34.011745 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 09:30:34 crc kubenswrapper[4779]: E0929 09:30:34.011823 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs podName:294a4484-da93-4c37-9ecf-18f68f4ad64d nodeName:}" failed. No retries permitted until 2025-09-29 09:31:06.01180349 +0000 UTC m=+97.993127394 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs") pod "network-metrics-daemon-qvlbd" (UID: "294a4484-da93-4c37-9ecf-18f68f4ad64d") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.094945 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.094978 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.094986 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.095001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.095011 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.197195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.197230 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.197242 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.197260 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.197273 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.299565 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.299590 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.299597 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.299610 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.299619 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.403215 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.403264 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.403277 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.403299 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.403312 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.505884 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.505947 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.505959 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.505996 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.506006 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.608482 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.608520 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.608528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.608542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.608551 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.711491 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.711521 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.711530 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.711544 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.711553 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.713840 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 09:30:34 crc kubenswrapper[4779]: E0929 09:30:34.713945 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.714227 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 09:30:34 crc kubenswrapper[4779]: E0929 09:30:34.714393 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.813391 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.813422 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.813430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.813443 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.813451 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.916803 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.916838 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.916846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.916859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:34 crc kubenswrapper[4779]: I0929 09:30:34.916867 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:34Z","lastTransitionTime":"2025-09-29T09:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.020108 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.020145 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.020156 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.020171 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.020182 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.123824 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.123881 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.123898 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.123981 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.124000 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.226923 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.226958 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.226967 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.226981 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.226990 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.328671 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.328705 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.328716 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.328732 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.328741 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.430581 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.430664 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.430676 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.430691 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.430702 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.533051 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.533080 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.533090 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.533103 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.533111 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.635295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.635322 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.635331 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.635343 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.635352 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.713445 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.713549 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:35 crc kubenswrapper[4779]: E0929 09:30:35.713622 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d"
Sep 29 09:30:35 crc kubenswrapper[4779]: E0929 09:30:35.713706 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.737737 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.737767 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.737776 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.737789 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.737799 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.839540 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.839572 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.839581 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.839595 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.839603 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.941416 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.941456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.941466 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.941482 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:35 crc kubenswrapper[4779]: I0929 09:30:35.941493 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:35Z","lastTransitionTime":"2025-09-29T09:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.043725 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.043769 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.043782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.043798 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.043810 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.141228 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/0.log"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.141279 4779 generic.go:334] "Generic (PLEG): container finished" podID="6b0e23f7-a478-48e2-a745-193a90e87553" containerID="944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4" exitCode=1
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.141306 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2tkr" event={"ID":"6b0e23f7-a478-48e2-a745-193a90e87553","Type":"ContainerDied","Data":"944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4"}
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.141685 4779 scope.go:117] "RemoveContainer" containerID="944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.156398 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.156430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.156441 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.156457 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.156466 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.168432 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.181289 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.193994 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z"
Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.217896 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418f
acef4d9e5cf4ff1ea6db6432\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.230956 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.243064 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.256341 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.259370 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.259404 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.259414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.259428 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.259439 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.268990 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.281562 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.290177 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.299783 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.312606 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.324767 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.336587 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.348220 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.360509 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.361342 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.361408 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.361424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.361442 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.361455 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.373203 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:35Z\\\",\\\"message\\\":\\\"2025-09-29T09:29:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9\\\\n2025-09-29T09:29:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9 to /host/opt/cni/bin/\\\\n2025-09-29T09:29:50Z [verbose] multus-daemon started\\\\n2025-09-29T09:29:50Z [verbose] Readiness Indicator file check\\\\n2025-09-29T09:30:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.385089 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:36Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.463967 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.464012 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:36 crc 
kubenswrapper[4779]: I0929 09:30:36.464024 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.464041 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.464053 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.566494 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.566543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.566556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.566572 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.566583 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.673216 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.673253 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.673266 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.673284 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.673297 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.714099 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.714189 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:36 crc kubenswrapper[4779]: E0929 09:30:36.714227 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:36 crc kubenswrapper[4779]: E0929 09:30:36.714462 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.775733 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.775782 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.775797 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.775818 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.775835 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.878621 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.878709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.878720 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.878738 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.878750 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.981619 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.981845 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.981957 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.982025 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:36 crc kubenswrapper[4779]: I0929 09:30:36.982100 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:36Z","lastTransitionTime":"2025-09-29T09:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.084541 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.084833 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.084951 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.085065 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.085145 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.146383 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/0.log" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.146681 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2tkr" event={"ID":"6b0e23f7-a478-48e2-a745-193a90e87553","Type":"ContainerStarted","Data":"10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.158017 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.169245 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.177712 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.187117 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.187182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.187195 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.187211 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.187224 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.188614 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.199102 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.210076 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.219773 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.230600 4779 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.244803 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:35Z\\\",\\\"message\\\":\\\"2025-09-29T09:29:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9\\\\n2025-09-29T09:29:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9 to /host/opt/cni/bin/\\\\n2025-09-29T09:29:50Z [verbose] multus-daemon started\\\\n2025-09-29T09:29:50Z [verbose] Readiness Indicator file check\\\\n2025-09-29T09:30:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.259538 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.272664 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.285286 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.289438 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.289470 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.289479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.289492 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.289504 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.297773 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.316680 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d667
7148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mou
ntPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.328725 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 
09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.349114 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.361919 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.372921 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:37Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.391298 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.391333 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.391343 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.391359 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.391368 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.493386 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.493417 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.493427 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.493439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.493448 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.595329 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.595392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.595408 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.595429 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.595441 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.697571 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.697635 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.697656 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.697682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.697701 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.714087 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.714145 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:37 crc kubenswrapper[4779]: E0929 09:30:37.714216 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:37 crc kubenswrapper[4779]: E0929 09:30:37.714313 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.800328 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.800367 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.800378 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.800392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.800403 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.903386 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.903457 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.903468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.903482 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:37 crc kubenswrapper[4779]: I0929 09:30:37.903491 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:37Z","lastTransitionTime":"2025-09-29T09:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.005898 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.006044 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.006068 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.006096 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.006118 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.108555 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.108603 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.108620 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.108642 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.108658 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.212372 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.212411 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.212421 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.212439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.212459 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.314010 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.314054 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.314065 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.314081 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.314091 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.417133 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.417192 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.417210 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.417233 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.417249 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.519977 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.520012 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.520020 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.520036 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.520045 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.622628 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.622661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.622670 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.622683 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.622692 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.713693 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.713704 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:38 crc kubenswrapper[4779]: E0929 09:30:38.713837 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:38 crc kubenswrapper[4779]: E0929 09:30:38.713988 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.724875 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.724962 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.724979 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.725003 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.725020 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.726714 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.742719 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.753612 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.761368 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.771646 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.782653 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.793291 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.804825 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:35Z\\\",\\\"message\\\":\\\"2025-09-29T09:29:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9\\\\n2025-09-29T09:29:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9 to /host/opt/cni/bin/\\\\n2025-09-29T09:29:50Z [verbose] multus-daemon started\\\\n2025-09-29T09:29:50Z [verbose] Readiness Indicator file check\\\\n2025-09-29T09:30:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.819880 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.826689 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.826715 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc 
kubenswrapper[4779]: I0929 09:30:38.826723 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.826735 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.826744 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.838712 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.851139 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.869642 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418f
acef4d9e5cf4ff1ea6db6432\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.880662 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.901872 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.912735 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.923462 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.929154 4779 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.929209 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.929219 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.929231 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.929240 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:38Z","lastTransitionTime":"2025-09-29T09:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.933055 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:38 crc kubenswrapper[4779]: I0929 09:30:38.944591 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:38Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.019710 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.019766 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.019783 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.019803 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.019817 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: E0929 09:30:39.031395 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:39Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.034804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.034842 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.034853 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.034869 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.034880 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: E0929 09:30:39.046713 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:39Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.053966 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.054008 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.054022 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.054043 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.054065 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: E0929 09:30:39.066321 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:39Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.069812 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.069855 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.069864 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.069877 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.069887 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: E0929 09:30:39.080466 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:39Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.083887 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.083935 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.083944 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.083961 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.083971 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: E0929 09:30:39.094977 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:39Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:39 crc kubenswrapper[4779]: E0929 09:30:39.095222 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.096561 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
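[annotation] Every status update in the entries above is rejected for the same root cause: the node.network-node-identity.openshift.io webhook's serving certificate expired on 2025-08-24T17:21:41Z, over a month before the current time the kubelet reports (2025-09-29), so TLS verification fails x509 validation and the kubelet eventually gives up ("update node status exceeds retry count"). A minimal sketch of the same validity-window test, assuming a hypothetical PEM path for the webhook's serving certificate:

// certcheck.go: minimal sketch of the validity-window check that x509
// verification performs; the file path is a hypothetical location.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/tmp/webhook-serving.crt") // hypothetical path
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now()
	// The same window test that yields
	// "certificate has expired or is not yet valid".
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("invalid: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339))
		return
	}
	fmt.Println("certificate is within its validity window")
}
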
event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.096588 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.096596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.096609 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.096619 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.198778 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.198827 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.198840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.198856 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.198867 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.301347 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.301392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.301436 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.301456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.301467 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.403580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.403622 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.403636 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.403653 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.403664 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.505838 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.505881 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.505893 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.505948 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.505964 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.608545 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.608584 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.608595 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.608614 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.608624 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.710652 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.710693 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.710717 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.710732 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.710742 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.713895 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.714153 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:39 crc kubenswrapper[4779]: E0929 09:30:39.714228 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:39 crc kubenswrapper[4779]: E0929 09:30:39.714315 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.714773 4779 scope.go:117] "RemoveContainer" containerID="c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.813553 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.813585 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.813594 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.813608 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.813618 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.915364 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.915390 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.915400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.915416 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:39 crc kubenswrapper[4779]: I0929 09:30:39.915428 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:39Z","lastTransitionTime":"2025-09-29T09:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.017756 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.017804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.017815 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.017830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.017841 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.119765 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.119797 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.119808 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.119823 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.119831 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.156116 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/2.log" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.158213 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.158573 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.190713 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf28
7ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.210619 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.222374 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.222406 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.222417 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.222434 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.222445 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.223587 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.233230 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.242802 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.253676 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.265266 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
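[annotation] The "$setElementOrder/conditions" keys inside the patch payloads above and below are strategic-merge-patch directives: they pin the ordering of the conditions list (by its merge key, "type") while only the entries listed under "conditions" are actually merged. A hand-written minimal patch of the same shape, shown as a sketch rather than an actual kubelet payload:

// patch.go: minimal strategic-merge-patch body using $setElementOrder.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	patch := []byte(`{
	  "status": {
	    "$setElementOrder/conditions": [{"type": "Ready"}],
	    "conditions": [
	      {"type": "Ready", "status": "False", "reason": "ContainersNotReady"}
	    ]
	  }
	}`)
	var pretty bytes.Buffer
	if err := json.Indent(&pretty, patch, "", "  "); err != nil {
		fmt.Fprintln(os.Stderr, "invalid patch:", err)
		os.Exit(1)
	}
	fmt.Println(pretty.String())
}
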
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.276152 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
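[annotation] As the error text above records, each rejected patch is the apiserver failing a Post to https://127.0.0.1:9743/pod?timeout=10s when admission runs for the kubelet's status update. A standalone sketch of that call shape, not OpenShift's actual admission client; against an expired serving certificate the client fails exactly as logged:

// webhookpost.go: reproduction sketch of the failing webhook call.
package main

import (
	"bytes"
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 10 * time.Second} // mirrors ?timeout=10s
	resp, err := client.Post(
		"https://127.0.0.1:9743/pod?timeout=10s",
		"application/json",
		bytes.NewReader([]byte(`{}`)),
	)
	if err != nil {
		// With an expired cert this wraps
		// "x509: certificate has expired or is not yet valid".
		fmt.Println("webhook call failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("webhook responded:", resp.Status)
}
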
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.286945 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.299760 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.314280 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.324776 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.324820 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.324828 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.324842 4779 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.324853 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.325001 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.337232 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:35Z\\\",\\\"message\\\":\\\"2025-09-29T09:29:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9\\\\n2025-09-29T09:29:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9 to /host/opt/cni/bin/\\\\n2025-09-29T09:29:50Z [verbose] multus-daemon started\\\\n2025-09-29T09:29:50Z [verbose] Readiness Indicator file check\\\\n2025-09-29T09:30:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.355286 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.366300 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.377697 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.398106 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", 
Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.409074 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:40Z is after 2025-08-24T17:21:41Z" Sep 29 
09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.427368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.427407 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.427416 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.427432 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.427443 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.529840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.529921 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.529937 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.529952 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.529966 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.632256 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.632298 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.632307 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.632321 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.632329 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.713260 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.713331 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 09:30:40 crc kubenswrapper[4779]: E0929 09:30:40.713387 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 09:30:40 crc kubenswrapper[4779]: E0929 09:30:40.713482 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.734807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.734843 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.734851 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.734864 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.734876 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.838034 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.838085 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.838096 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.838111 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.838124 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.941215 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.941288 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.941315 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.941345 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:40 crc kubenswrapper[4779]: I0929 09:30:40.941367 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:40Z","lastTransitionTime":"2025-09-29T09:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.044854 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.044950 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.044961 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.044977 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.044986 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.148469 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.148523 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.148537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.148559 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.148576 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.163020 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/3.log"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.163764 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/2.log"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.166291 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" exitCode=1
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.166324 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.166354 4779 scope.go:117] "RemoveContainer" containerID="c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.167240 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"
Sep 29 09:30:41 crc kubenswrapper[4779]: E0929 09:30:41.167478 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.179840 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status
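
The CrashLoopBackOff record above shows the kubelet holding ovnkube-controller back for 40s before the next restart attempt. The kubelet's restart back-off doubles from a 10s base on each consecutive failure and is capped at 5 minutes, so 40s corresponds to the third failed restart in a row. A minimal sketch of that schedule, using the kubelet's default base and cap as assumptions rather than its actual code:

package main

import (
	"fmt"
	"time"
)

// backoff returns the CrashLoopBackOff delay before restart attempt number
// "restarts": 10s, 20s, 40s, ... capped at 5 minutes. The base and cap
// mirror the kubelet's default settings; this is an illustration, not the
// kubelet implementation.
func backoff(restarts int) time.Duration {
	const base = 10 * time.Second
	const maxDelay = 5 * time.Minute
	d := base
	for i := 1; i < restarts; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for r := 1; r <= 7; r++ {
		fmt.Printf("restart %d -> back-off %s\n", r, backoff(r))
	}
}

The status_manager record begun on the line above continues below with the JSON patch the kubelet tried to send.
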
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.190815 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.204752 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.214771 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.224722 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.237608 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:35Z\\\",\\\"message\\\":\\\"2025-09-29T09:29:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9\\\\n2025-09-29T09:29:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9 to /host/opt/cni/bin/\\\\n2025-09-29T09:29:50Z [verbose] multus-daemon started\\\\n2025-09-29T09:29:50Z [verbose] Readiness Indicator file check\\\\n2025-09-29T09:30:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.249995 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
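
The kube-multus restart recorded above is a consequence of the same outage: the multus daemon polls for a readiness indicator file that the default network plugin writes once its CNI configuration is in place, and it eventually gives up with "timed out waiting for the condition". A plain-stdlib sketch of such a poll loop, standing in for the PollImmediate helper named in the error; the path is taken from the log message, the interval and timeout are illustrative:

package main

import (
	"errors"
	"fmt"
	"os"
	"time"
)

// waitForFile checks immediately, then at a fixed interval, until the
// readiness indicator file exists or the timeout elapses, mirroring the
// wait multus performs on 10-ovn-kubernetes.conf in the record above.
func waitForFile(path string, interval, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for {
		if _, err := os.Stat(path); err == nil {
			return nil // file appeared: the default network is ready
		}
		if time.Now().After(deadline) {
			return errors.New("timed out waiting for the condition")
		}
		time.Sleep(interval)
	}
}

func main() {
	err := waitForFile("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf",
		time.Second, 45*time.Second)
	fmt.Println("result:", err)
}
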
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.250970 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.251005 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:41 crc 
kubenswrapper[4779]: I0929 09:30:41.251017 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.251031 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.251040 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.270113 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.281037 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.291850 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.307302 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d18b61ad4ffcd8bc213caf632174b18bbb9e1f1
7b46ba199720c9f155c292f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c43f1fb41b70a75fdced863cef4c3e186627418facef4d9e5cf4ff1ea6db6432\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:11Z\\\",\\\"message\\\":\\\"o start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:11Z is after 2025-08-24T17:21:41Z]\\\\nI0929 09:30:11.835139 6393 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-gx6f2 in node crc\\\\nI0929 09:30:11.835086 6393 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Pro\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:40Z\\\",\\\"message\\\":\\\"t.io/serving-cert-secret-name:kube-controller-manager-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076d63df \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: kube-controller-manager-operator,},ClusterIP:10.217.4.219,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.219],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0929 
09:30:40.601664 6756 services_controller.go:444] Built service openshift-dns/dns-default LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"UDP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[stri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.317944 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 
09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.329778 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.340958 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.353100 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.353420 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.353525 4779 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.353597 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.353843 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.353924 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.361842 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 
2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.369855 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.379797 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:41Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.455984 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.456061 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.456074 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.456093 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.456128 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.558445 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.558487 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.558501 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.558518 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.558529 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.661231 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.661281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.661294 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.661312 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.661323 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.714042 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.714065 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:41 crc kubenswrapper[4779]: E0929 09:30:41.714168 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d"
Sep 29 09:30:41 crc kubenswrapper[4779]: E0929 09:30:41.714375 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.764994 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.765323 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.765468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.765591 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.765706 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.869376 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.869615 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.869659 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.869794 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.869821 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.972591 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.972623 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.972634 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.972669 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:41 crc kubenswrapper[4779]: I0929 09:30:41.972680 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:41Z","lastTransitionTime":"2025-09-29T09:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.075521 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.075567 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.075577 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.075592 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.075603 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.171093 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/3.log"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.175679 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"
Sep 29 09:30:42 crc kubenswrapper[4779]: E0929 09:30:42.176243 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.179069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.179128 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.179145 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.179168 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.179188 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.191625 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.208316 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.220196 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.233768 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.250330 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.263977 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.276386 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.294491 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.294853 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.295084 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.295267 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.295409 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.295389 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:35Z\\\",\\\"message\\\":\\\"2025-09-29T09:29:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9\\\\n2025-09-29T09:29:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9 to /host/opt/cni/bin/\\\\n2025-09-29T09:29:50Z [verbose] multus-daemon started\\\\n2025-09-29T09:29:50Z [verbose] Readiness Indicator file check\\\\n2025-09-29T09:30:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.313680 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.328919 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.343259 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.363643 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d18b61ad4ffcd8bc213caf632174b18bbb9e1f1
7b46ba199720c9f155c292f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:40Z\\\",\\\"message\\\":\\\"t.io/serving-cert-secret-name:kube-controller-manager-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076d63df \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: kube-controller-manager-operator,},ClusterIP:10.217.4.219,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.219],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0929 09:30:40.601664 6756 services_controller.go:444] Built service openshift-dns/dns-default LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"UDP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[stri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.377345 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.398231 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c
44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.398528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.398555 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.398564 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.398611 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.398630 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.415523 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.428057 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.443361 4779 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.453847 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:42Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.501154 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.501199 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.501213 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.501234 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.501250 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.603841 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.603883 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.603893 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.603944 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.603963 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.706033 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.706068 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.706077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.706106 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.706116 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.713876 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:42 crc kubenswrapper[4779]: E0929 09:30:42.713997 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.714159 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:42 crc kubenswrapper[4779]: E0929 09:30:42.714361 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.808226 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.808295 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.808308 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.808324 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.808336 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.911146 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.911198 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.911211 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.911230 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:42 crc kubenswrapper[4779]: I0929 09:30:42.911246 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:42Z","lastTransitionTime":"2025-09-29T09:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.014548 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.014647 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.014666 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.014690 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.014708 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.117412 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.117436 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.117444 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.117456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.117464 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.219673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.219729 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.219739 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.219753 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.219765 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.322672 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.323188 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.323209 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.323230 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.323287 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.426479 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.426843 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.427050 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.427217 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.427359 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.530542 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.531052 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.531122 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.531200 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.531270 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.634285 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.634323 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.634334 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.634350 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.634362 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.713877 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.713972 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:43 crc kubenswrapper[4779]: E0929 09:30:43.714114 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:43 crc kubenswrapper[4779]: E0929 09:30:43.714252 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.737832 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.738149 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.738299 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.738455 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.738586 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.841240 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.841303 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.841320 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.841346 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.841363 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.944784 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.944840 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.944861 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.944888 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:43 crc kubenswrapper[4779]: I0929 09:30:43.944938 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:43Z","lastTransitionTime":"2025-09-29T09:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.046936 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.046972 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.046980 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.046992 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.047001 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.150482 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.150539 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.150554 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.150574 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.150587 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.253742 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.253827 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.253844 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.253870 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.253888 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.355859 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.355997 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.356007 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.356019 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.356028 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.458464 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.458506 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.458515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.458529 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.458540 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.561465 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.561538 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.561561 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.561590 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.561615 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.665015 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.665077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.665097 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.665121 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.665138 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.713522 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.713580 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:44 crc kubenswrapper[4779]: E0929 09:30:44.713703 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:44 crc kubenswrapper[4779]: E0929 09:30:44.713939 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.767950 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.768046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.768057 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.768071 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.768082 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.871146 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.871216 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.871232 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.871255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.871274 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.973776 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.973838 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.973854 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.973877 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:44 crc kubenswrapper[4779]: I0929 09:30:44.973894 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:44Z","lastTransitionTime":"2025-09-29T09:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.076584 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.076658 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.076684 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.076713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.076731 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.180373 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.180425 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.180439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.180458 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.180471 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.284027 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.284424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.284633 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.284846 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.285086 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.387991 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.388058 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.388081 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.388109 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.388130 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.491749 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.492165 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.492339 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.492508 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.492637 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.596334 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.596360 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.596370 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.596387 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.596398 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.699244 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.699292 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.699302 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.699314 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.699323 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.713683 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:45 crc kubenswrapper[4779]: E0929 09:30:45.713806 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.713984 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:45 crc kubenswrapper[4779]: E0929 09:30:45.714035 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.802045 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.802129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.802142 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.802157 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.802167 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.905038 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.905116 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.905139 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.905169 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:45 crc kubenswrapper[4779]: I0929 09:30:45.905191 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:45Z","lastTransitionTime":"2025-09-29T09:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.008647 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.008699 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.008715 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.008743 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.008761 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.112046 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.112098 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.112113 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.112134 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.112150 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.214540 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.214580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.214588 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.214602 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.214612 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.318048 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.318113 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.318131 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.318155 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.318173 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.420772 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.421187 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.421332 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.421469 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.421610 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.524976 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.525059 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.525080 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.525104 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.525121 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.627936 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.627987 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.628011 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.628038 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.628060 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.713633 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.713682 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:46 crc kubenswrapper[4779]: E0929 09:30:46.713854 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:46 crc kubenswrapper[4779]: E0929 09:30:46.714052 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.730163 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.730205 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.730220 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.730237 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.730251 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.833251 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.833311 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.833336 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.833365 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.833388 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.935661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.935764 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.935787 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.935815 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:46 crc kubenswrapper[4779]: I0929 09:30:46.935835 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:46Z","lastTransitionTime":"2025-09-29T09:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.038450 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.038613 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.038682 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.038709 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.038730 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.141472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.141807 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.141831 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.141863 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.141885 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.243661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.243729 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.243741 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.243780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.243793 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.346357 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.346408 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.346422 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.346439 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.346452 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.449495 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.449539 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.449574 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.449593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.449605 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.553217 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.553282 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.553302 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.553325 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.553342 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.656382 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.656427 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.656438 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.656458 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.656472 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.713575 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.713667 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:47 crc kubenswrapper[4779]: E0929 09:30:47.713729 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:47 crc kubenswrapper[4779]: E0929 09:30:47.713854 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.759428 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.759488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.759501 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.759557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.759570 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.862352 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.862402 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.862414 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.862435 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.862452 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.965483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.965552 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.965570 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.965596 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:47 crc kubenswrapper[4779]: I0929 09:30:47.965616 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:47Z","lastTransitionTime":"2025-09-29T09:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.068595 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.068635 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.068645 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.068660 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.068671 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.176667 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.176732 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.176746 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.176766 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.176778 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.278869 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.278930 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.278943 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.278961 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.278980 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.382119 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.382157 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.382166 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.382187 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.382197 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.485184 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.485291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.485321 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.485354 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.485379 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.588768 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.588804 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.588812 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.588831 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.588843 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.692079 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.692182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.692209 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.692245 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.692270 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.714379 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.714421 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:48 crc kubenswrapper[4779]: E0929 09:30:48.714543 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:48 crc kubenswrapper[4779]: E0929 09:30:48.714728 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.743316 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f894
5c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.769295 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.793045 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.795453 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.795509 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.795526 4779 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.795551 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.795570 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.815316 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.833049 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.855891 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:35Z\\\",\\\"message\\\":\\\"2025-09-29T09:29:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9\\\\n2025-09-29T09:29:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9 to /host/opt/cni/bin/\\\\n2025-09-29T09:29:50Z [verbose] multus-daemon started\\\\n2025-09-29T09:29:50Z [verbose] Readiness Indicator file check\\\\n2025-09-29T09:30:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.884073 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.898740 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.898818 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:48 crc 
kubenswrapper[4779]: I0929 09:30:48.898842 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.898876 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.898899 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:48Z","lastTransitionTime":"2025-09-29T09:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.920335 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0846537-9071-453d-968b-7537a7233656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://103d18548d558c96520d7c4c31d3ef44abc2905afa5c26ca3f93d2a1cd6ff604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ce06a39c8fd0fba9a0bd1af96ff34cfb10a153801ecff617db4df5e64eb476b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe3edd15b34d8223ba98808b34bee098cb2ffa1ac4fd3e949007acc04303e370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22cc6e6e78a7eaac01a6c53d5e5527ef3120c8c44bf28813e65038f939e79872\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc3416bb385ad0ff953b03c268f186747c47a2dfdf11ba34a3b00be5391e90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8442bd56bf7c2e6708a3b0f6c21ba803d9bc76a84d32647b507d5961a78e8621\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b03f58ad046bc93040f00ae10e8f265099417fcb84c10b4ca1707d6744729648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e2abd0b0dc5d3b95c510100e11f7f2fa4b05786325b692244ea51633d2f597e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.943135 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19a67159-61a5-4f75-940a-212c850bd498\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff866eb8fdb247e8bfe638be9525a31733ed912047dcbbc7514ab6324675b6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69616a6ca151dfda518aae66819aac108f73967794661b878db2480ffbb5b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bb91e73e9e2666ab82d5a13fee96ed7406f6178863603e607766ae3172d3ee6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2148aa80d9f71c8424530711e10af5b0bd10437655123ac57b87d1c90a280233\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.960948 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1a5d3a7-37d9-4a87-864c-e4af7f504a19\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462a76735c6e380ec32967ba87ebe13a19e1d3687ac87182f01b23ce8fabe8e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7979507aa9ce4c5e1
c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7ndnj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5lnlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:48 crc kubenswrapper[4779]: I0929 09:30:48.993184 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60d71749-dfb5-4095-b11b-b70f1a549b88\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d18b61ad4ffcd8bc213caf632174b18bbb9e1f1
7b46ba199720c9f155c292f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:40Z\\\",\\\"message\\\":\\\"t.io/serving-cert-secret-name:kube-controller-manager-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076d63df \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: kube-controller-manager-operator,},ClusterIP:10.217.4.219,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.219],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0929 09:30:40.601664 6756 services_controller.go:444] Built service openshift-dns/dns-default LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"UDP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[stri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:30:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m99nd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-ncxc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:48Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.006970 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.007059 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.007082 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.007114 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.007133 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.015223 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c469d18e-5b53-4a41-9305-87ae2f8db671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c8ba3507185bd18ea878f73feed6ba33fb15a49a9f2575a66a88ac28fedef14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01f30b5162df9f09f1af03ebeb5f7218a68b49bf4ff38cd06f3dad13d5042f88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqdj2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fxjq2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.032213 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d52ae941-3523-4957-aa78-7c744b7a14d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813ea0b64e4e968f7808a4f272fe8c987b24c23a26e2fd7e1fe2f2bbd3b0498e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://def414c582c214da578be9ef1b899f09185c7b6318cd6b191920d6293e89873f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e18a438c1a81da7779de1fc69aeff90acf287ced68bb63bf1faac1ba595ec528\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb35312a74983b7c72a2349d1b461968b67ae4cf725beeaa8699fd1c9c5845e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.049188 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.068710 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2adc78ec8f54811c8fd09a5e558132f9f57501d9a92d4738935148bb4e714eb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.084532 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5584" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"564bff56-93cb-42ac-bd34-bbe97f99f411\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedbde6cf29bb77c762d7da2d2e414d3e7c2c6edb3addd97473367b89b2e3fe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvwtv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5584\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.099930 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gx6f2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25e887ba-720d-4f7b-9763-5703781fd8bf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a1604b3eb7d80cc28d7d36550e740405d3012bb83109444bf7354793ebbdeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9lw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gx6f2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.109805 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.109871 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.109893 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.109961 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.109985 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.117212 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"294a4484-da93-4c37-9ecf-18f68f4ad64d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nsgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:30:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvlbd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.213738 4779 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.213801 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.213821 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.213849 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.213869 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.274440 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.274522 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.274555 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.274585 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.274611 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: E0929 09:30:49.296472 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.300949 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.300988 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.301001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.301023 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.301049 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: E0929 09:30:49.318375 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.322028 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.322069 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.322077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.322090 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.322099 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: E0929 09:30:49.336614 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.341096 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.341120 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.341129 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.341143 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.341152 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: E0929 09:30:49.353308 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.357423 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.357492 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.357513 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.357539 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.357557 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: E0929 09:30:49.372203 4779 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"25d3a4e6-deea-47ab-ac6b-f80ccadc03c7\\\",\\\"systemUUID\\\":\\\"6af97324-aa9b-4cb6-ab41-66056c52c25a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:49Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:49 crc kubenswrapper[4779]: E0929 09:30:49.372424 4779 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.374640 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.374671 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.374680 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.374697 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.374708 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.477360 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.477438 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.477449 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.477467 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.477478 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.581110 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.581165 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.581180 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.581200 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.581216 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.684883 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.684981 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.685001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.685029 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.685050 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.713953 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.714177 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:49 crc kubenswrapper[4779]: E0929 09:30:49.714328 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:49 crc kubenswrapper[4779]: E0929 09:30:49.714484 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.727628 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.788521 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.788583 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.788605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.788632 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.788650 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.891875 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.891936 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.891946 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.891962 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.891973 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.993915 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.993947 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.993958 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.993972 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:49 crc kubenswrapper[4779]: I0929 09:30:49.993983 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:49Z","lastTransitionTime":"2025-09-29T09:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-line status cycle (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats at roughly 100 ms intervals at 09:30:50.096, 09:30:50.199, 09:30:50.301, 09:30:50.405, 09:30:50.507 and 09:30:50.610; only the timestamps change]
Sep 29 09:30:50 crc kubenswrapper[4779]: I0929 09:30:50.713223 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 09:30:50 crc kubenswrapper[4779]: I0929 09:30:50.713259 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 09:30:50 crc kubenswrapper[4779]: E0929 09:30:50.713537 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 09:30:50 crc kubenswrapper[4779]: E0929 09:30:50.713593 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[the status cycle repeats at 09:30:50.713 and 09:30:50.825]
Sep 29 09:30:50 crc kubenswrapper[4779]: I0929 09:30:50.928645 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:50 crc kubenswrapper[4779]: I0929 09:30:50.928695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:50 crc kubenswrapper[4779]: I0929 09:30:50.928706 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:50 crc kubenswrapper[4779]: I0929 09:30:50.928721 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:50 crc kubenswrapper[4779]: I0929 09:30:50.928732 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:50Z","lastTransitionTime":"2025-09-29T09:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the status cycle repeats at 09:30:51.031, 09:30:51.134, 09:30:51.237, 09:30:51.341, 09:30:51.443, 09:30:51.546 and 09:30:51.650]
Sep 29 09:30:51 crc kubenswrapper[4779]: I0929 09:30:51.713521 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:51 crc kubenswrapper[4779]: I0929 09:30:51.713561 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:51 crc kubenswrapper[4779]: E0929 09:30:51.713765 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 09:30:51 crc kubenswrapper[4779]: E0929 09:30:51.713830 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d"
[the status cycle repeats at 09:30:51.753 and 09:30:51.855]
Sep 29 09:30:51 crc kubenswrapper[4779]: I0929 09:30:51.958400 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:51 crc kubenswrapper[4779]: I0929 09:30:51.958436 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:51 crc kubenswrapper[4779]: I0929 09:30:51.958445 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:51 crc kubenswrapper[4779]: I0929 09:30:51.958477 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:51 crc kubenswrapper[4779]: I0929 09:30:51.958486 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:51Z","lastTransitionTime":"2025-09-29T09:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the status cycle repeats at 09:30:52.060, 09:30:52.164, 09:30:52.267, 09:30:52.371, 09:30:52.474 and 09:30:52.576]
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.632569 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.632685 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.632710 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:56.632682906 +0000 UTC m=+148.614006810 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.632754 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.632773 4779 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.632829 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:31:56.6328114 +0000 UTC m=+148.614135314 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.632925 4779 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.632985 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 09:31:56.632974685 +0000 UTC m=+148.614298589 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
[the status cycle repeats at 09:30:52.680]
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.714180 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.714180 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.714435 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.714563 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.733884 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.734093 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.734166 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.734213 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.734239 4779 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.734345 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 09:31:56.734311133 +0000 UTC m=+148.715635147 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.734402 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.734459 4779 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.734490 4779 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 09:30:52 crc kubenswrapper[4779]: E0929 09:30:52.734615 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 09:31:56.734579722 +0000 UTC m=+148.715903746 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
[the status cycle repeats at 09:30:52.783]
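[note: both nestedpendingoperations.go failures above schedule the next attempt 1m4s out (m=+148.7s, retry at 09:31:56). 64 s is the shape of a plain doubling backoff; below is a sketch of that arithmetic, with the initial delay and cap chosen for illustration rather than read from kubelet:]

package main

import (
    "fmt"
    "time"
)

// backoff returns the delay before the n-th retry under plain doubling,
// starting at initial and capped at max. The constants passed in main are
// assumptions for the sketch, not kubelet's actual values.
func backoff(initial, max time.Duration, n int) time.Duration {
    d := initial
    for i := 1; i < n; i++ {
        d *= 2
        if d >= max {
            return max
        }
    }
    return d
}

func main() {
    for n := 1; n <= 9; n++ {
        fmt.Printf("attempt %d -> durationBeforeRetry %s\n", n, backoff(500*time.Millisecond, 2*time.Minute, n))
    }
    // attempt 8 prints 1m4s, the durationBeforeRetry seen in the log above.
}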
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.886110 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.886176 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.886201 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.886229 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 09:30:52 crc kubenswrapper[4779]: I0929 09:30:52.886249 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:52Z","lastTransitionTime":"2025-09-29T09:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the status cycle repeats at 09:30:52.990, 09:30:53.092, 09:30:53.195, 09:30:53.299, 09:30:53.403, 09:30:53.506, 09:30:53.609 and 09:30:53.712]
Sep 29 09:30:53 crc kubenswrapper[4779]: I0929 09:30:53.713490 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:30:53 crc kubenswrapper[4779]: I0929 09:30:53.713490 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 09:30:53 crc kubenswrapper[4779]: E0929 09:30:53.713690 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d"
Sep 29 09:30:53 crc kubenswrapper[4779]: E0929 09:30:53.713881 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[the status cycle repeats at 09:30:53.815]
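[note: by this point the same four pods (network-check-target, network-check-source, networking-console-plugin, network-metrics-daemon) are cycling through "Error syncing pod, skipping" once per resync. Below is a small Go sketch that scans a saved copy of this log and prints each affected pod/podUID once; the filename is a placeholder, and the regexp matches the pod="..." podUID="..." tail of the pod_workers.go lines:]

package main

import (
    "bufio"
    "fmt"
    "os"
    "regexp"
)

func main() {
    // Placeholder path for a saved copy of this log.
    f, err := os.Open("kubelet.log")
    if err != nil {
        panic(err)
    }
    defer f.Close()

    // Matches the tail of the pod_workers.go error lines seen above; the
    // util.go and reconciler lines carry no podUID and so do not match.
    re := regexp.MustCompile(`pod="([^"]+)" podUID="([^"]+)"`)
    seen := make(map[string]bool)

    sc := bufio.NewScanner(f)
    sc.Buffer(make([]byte, 0, 64*1024), 1024*1024) // heartbeat lines are long
    for sc.Scan() {
        if m := re.FindStringSubmatch(sc.Text()); m != nil && !seen[m[2]] {
            seen[m[2]] = true
            fmt.Printf("%s (podUID %s)\n", m[1], m[2])
        }
    }
    if err := sc.Err(); err != nil {
        panic(err)
    }
}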
Has your network provider started?"} Sep 29 09:30:53 crc kubenswrapper[4779]: I0929 09:30:53.918145 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:53 crc kubenswrapper[4779]: I0929 09:30:53.918214 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:53 crc kubenswrapper[4779]: I0929 09:30:53.918238 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:53 crc kubenswrapper[4779]: I0929 09:30:53.918273 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:53 crc kubenswrapper[4779]: I0929 09:30:53.918300 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:53Z","lastTransitionTime":"2025-09-29T09:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.021201 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.021248 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.021263 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.021285 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.021297 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.124181 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.124255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.124277 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.124305 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.124330 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.227740 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.227780 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.227792 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.227806 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.227815 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.330974 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.331031 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.331051 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.331077 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.331096 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.434049 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.434089 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.434099 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.434112 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.434120 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.538040 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.538090 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.538108 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.538133 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.538154 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.642316 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.642361 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.642373 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.642392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.642408 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.714147 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.714195 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:54 crc kubenswrapper[4779]: E0929 09:30:54.714332 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:54 crc kubenswrapper[4779]: E0929 09:30:54.714506 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.745536 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.745580 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.745594 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.745616 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.745631 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.847944 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.847988 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.847997 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.848012 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.848024 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.951551 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.951605 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.951620 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.951641 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:54 crc kubenswrapper[4779]: I0929 09:30:54.951654 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:54Z","lastTransitionTime":"2025-09-29T09:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.054146 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.054205 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.054223 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.054249 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.054272 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.157537 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.157593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.157610 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.157633 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.157653 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.261288 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.261344 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.261361 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.261387 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.261405 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.363864 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.363958 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.363979 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.364001 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.364020 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.466713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.466778 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.466800 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.466828 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.466848 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.569179 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.569240 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.569263 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.569288 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.569306 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.672011 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.672043 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.672054 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.672075 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.672086 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.713335 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.713364 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:55 crc kubenswrapper[4779]: E0929 09:30:55.713479 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:55 crc kubenswrapper[4779]: E0929 09:30:55.713627 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.775370 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.775409 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.775419 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.775458 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.775470 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.878182 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.878482 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.878515 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.878600 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.878625 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.981230 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.981274 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.981288 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.981308 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:55 crc kubenswrapper[4779]: I0929 09:30:55.981322 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:55Z","lastTransitionTime":"2025-09-29T09:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.083556 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.083593 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.083606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.083627 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.083673 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.185989 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.186480 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.186508 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.186540 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.186563 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.289255 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.289333 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.289360 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.289392 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.289418 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.392610 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.392715 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.392745 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.392781 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.392805 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.496610 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.496678 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.496698 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.496727 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.496748 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.600291 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.600368 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.600393 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.600429 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.600456 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.704673 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.704767 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.704793 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.704830 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.704856 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.714129 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.714408 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:56 crc kubenswrapper[4779]: E0929 09:30:56.714452 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:56 crc kubenswrapper[4779]: E0929 09:30:56.714638 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.807896 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.807998 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.808017 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.808044 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.808062 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.919829 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.919939 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.919967 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.919998 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:56 crc kubenswrapper[4779]: I0929 09:30:56.920019 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:56Z","lastTransitionTime":"2025-09-29T09:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.023327 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.023391 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.023408 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.023430 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.023449 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.126713 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.126776 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.126790 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.126814 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.126832 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.230845 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.230955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.230976 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.231006 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.231031 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.335196 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.335289 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.335314 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.335349 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.335373 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.439424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.439528 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.439549 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.439581 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.439607 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.543096 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.543148 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.543157 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.543175 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.543185 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.646517 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.646587 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.646606 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.646634 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.646653 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.713309 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.713329 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:57 crc kubenswrapper[4779]: E0929 09:30:57.713565 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:57 crc kubenswrapper[4779]: E0929 09:30:57.714247 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.715009 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:30:57 crc kubenswrapper[4779]: E0929 09:30:57.715391 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.750630 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.750668 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.750679 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.750695 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.750705 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.853489 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.853564 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.853586 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.853615 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.853635 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.957277 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.957333 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.957351 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.957375 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:57 crc kubenswrapper[4779]: I0929 09:30:57.957392 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:57Z","lastTransitionTime":"2025-09-29T09:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.060773 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.060839 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.060966 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.061034 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.061055 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.163683 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.163758 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.163774 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.163791 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.163804 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.267281 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.267336 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.267355 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.267379 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.267396 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.371340 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.371419 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.371450 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.371486 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.371516 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.474825 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.474929 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.474955 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.474985 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.475006 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.577615 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.577657 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.577666 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.577679 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.577689 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.680417 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.680472 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.680488 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.680513 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.680532 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.713578 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.713675 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:30:58 crc kubenswrapper[4779]: E0929 09:30:58.713780 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:30:58 crc kubenswrapper[4779]: E0929 09:30:58.714076 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.731412 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.750092 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2tkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b0e23f7-a478-48e2-a745-193a90e87553\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:30:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T09:30:35Z\\\",\\\"message\\\":\\\"2025-09-29T09:29:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9\\\\n2025-09-29T09:29:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_66248466-581a-43c7-a305-4bf0ed7a54f9 to /host/opt/cni/bin/\\\\n2025-09-29T09:29:50Z [verbose] multus-daemon started\\\\n2025-09-29T09:29:50Z [verbose] Readiness Indicator file check\\\\n2025-09-29T09:30:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:30:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x5rx8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2tkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.767205 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-twvvx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95f4faf4-0a02-4440-ad6d-2ab0fae56bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3a42c70c25df51312e2e5ede663678cbccf94d50dad43886557024669dd2e82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d96bb64e9e6cf41355afdb1bca189e2fa07f0ae4d0ef36156256a1ebb4c451f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://693039553f2b253415a2e16926eea6704a21abd0a3395c6659db6ebab68e0f45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfe38229bcd948f0f8e1e234c8e4ea16d4dbc8c5c4252f5486048db0c9910412\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbf355d9614f64771657a4103437e9c55f3b2b5080282d7ad3af70a5f44ffce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b854329d7e329a9099a032b8e91ab2064164ae0e43e57b61c5c55b42fca2a9ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93312b300c9348ca8caa0f12bd7825d6b7d1d720babd765aa679c570220ac15a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr62p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:48Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-twvvx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.781980 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e291840-9db5-4515-810a-190428263dfb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6f627c2a2c06fbe034514d02939db8b7576b8cb753aa65a45a271482cf138c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde5335a7ca2db0e56ded3f2fc58506882df09718e6ada6dd1849b891ebc3dd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0f81c275d3f99ff02d4d105dfff34249cff38150c1aa6199bdc765773f0fa2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33e68ef001a27a30ef12923f95eaaef45667d18c8dab2997491caf12f595ed0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6837fc3f6d91505858a90e49945041b663c864b4ebcbd2ecc086c3575b47c1f7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T09:29:42Z\\\",\\\"message\\\":\\\"W0929 09:29:31.795697 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 09:29:31.796138 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759138171 cert, and key in /tmp/serving-cert-3245563206/serving-signer.crt, /tmp/serving-cert-3245563206/serving-signer.key\\\\nI0929 09:29:32.157414 1 observer_polling.go:159] Starting file observer\\\\nW0929 09:29:32.160455 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 09:29:32.160736 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 09:29:32.163324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3245563206/tls.crt::/tmp/serving-cert-3245563206/tls.key\\\\\\\"\\\\nF0929 09:29:42.581603 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55099c1030bd4943e0df0b13b1203c7143f79edf5e02c8353ccd042610e8059c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://6829914f4826fbabdc8f3845da32a96f6a4d217abfc0cb8ec80efc187d33c72e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.783466 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.783723 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.783985 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.784250 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.784461 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.791775 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b381812-e827-42ae-91ed-3f717d838cf1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://000b9907e5cd9992d56b78dcb66e94850ffd21fe78fd7232d38766489eddc815\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4b0ce933d2a7a4ca883840e6bba48e35e7ae32e487a991ad0571a73fd08ade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4b0ce933d2a7a4ca883840e6bba48e35e7ae32e487a991ad0571a73fd08ade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T09:29:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T09:29:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T09:29:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.802557 4779 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.814212 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f96f3b3b0028e52c62e9b886d5f9c3347ec85f91ec0e5ef3de48bd216a2c687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f10690e584c904be791611af52b807ee3d20cb5551821042a1c5e71e1f1571d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.824815 4779 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T09:29:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f75b386997d5f82c8a8136e8758dac2b770b2ef60bec255c70978046c3e6cd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T09:29:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T09:30:58Z is after 2025-08-24T17:21:41Z" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.867251 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=66.867231663 podStartE2EDuration="1m6.867231663s" podCreationTimestamp="2025-09-29 09:29:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:58.867076529 +0000 UTC m=+90.848400453" watchObservedRunningTime="2025-09-29 09:30:58.867231663 +0000 UTC m=+90.848555587" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.887420 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.887456 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.887468 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.887483 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.887495 4779 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.893770 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podStartSLOduration=71.893735207 podStartE2EDuration="1m11.893735207s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:58.893642614 +0000 UTC m=+90.874966518" watchObservedRunningTime="2025-09-29 09:30:58.893735207 +0000 UTC m=+90.875059111" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.893945 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=64.893938844 podStartE2EDuration="1m4.893938844s" podCreationTimestamp="2025-09-29 09:29:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:58.883837136 +0000 UTC m=+90.865161050" watchObservedRunningTime="2025-09-29 09:30:58.893938844 +0000 UTC m=+90.875262748" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.921626 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fxjq2" podStartSLOduration=70.921609134 podStartE2EDuration="1m10.921609134s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:58.92147397 +0000 UTC m=+90.902797874" watchObservedRunningTime="2025-09-29 09:30:58.921609134 +0000 UTC m=+90.902933038" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.946744 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=38.946724484 podStartE2EDuration="38.946724484s" podCreationTimestamp="2025-09-29 09:30:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:58.934938833 +0000 UTC m=+90.916262757" watchObservedRunningTime="2025-09-29 09:30:58.946724484 +0000 UTC m=+90.928048398" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.974851 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-r5584" podStartSLOduration=71.974836579 podStartE2EDuration="1m11.974836579s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:58.974525579 +0000 UTC m=+90.955849483" watchObservedRunningTime="2025-09-29 09:30:58.974836579 +0000 UTC m=+90.956160483" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.989186 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.989400 4779 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.989469 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.989543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.989621 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:58Z","lastTransitionTime":"2025-09-29T09:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:58 crc kubenswrapper[4779]: I0929 09:30:58.995549 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-gx6f2" podStartSLOduration=71.99553289 podStartE2EDuration="1m11.99553289s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:58.984876785 +0000 UTC m=+90.966200699" watchObservedRunningTime="2025-09-29 09:30:58.99553289 +0000 UTC m=+90.976856794" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.091600 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.091631 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.091661 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.091676 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.091684 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:59Z","lastTransitionTime":"2025-09-29T09:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.193348 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.193389 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.193405 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.193424 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.193435 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:59Z","lastTransitionTime":"2025-09-29T09:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.295464 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.295527 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.295545 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.295568 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.295585 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:59Z","lastTransitionTime":"2025-09-29T09:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.398497 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.398535 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.398543 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.398557 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.398566 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:59Z","lastTransitionTime":"2025-09-29T09:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.500831 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.500877 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.500889 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.500925 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.500936 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:59Z","lastTransitionTime":"2025-09-29T09:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.603504 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.603571 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.603588 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.603614 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.603632 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:59Z","lastTransitionTime":"2025-09-29T09:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.621869 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.621929 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.621944 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.621960 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.621972 4779 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T09:30:59Z","lastTransitionTime":"2025-09-29T09:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.666222 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9"] Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.667322 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.672536 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.672617 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.672694 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.675498 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.713702 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.713701 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:30:59 crc kubenswrapper[4779]: E0929 09:30:59.713914 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:30:59 crc kubenswrapper[4779]: E0929 09:30:59.713884 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.740642 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-f2tkr" podStartSLOduration=71.74062493 podStartE2EDuration="1m11.74062493s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:59.740368692 +0000 UTC m=+91.721692596" watchObservedRunningTime="2025-09-29 09:30:59.74062493 +0000 UTC m=+91.721948834" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.748421 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/34d76679-9d74-4870-ab4a-7b98558b8e00-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.748465 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/34d76679-9d74-4870-ab4a-7b98558b8e00-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.748490 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/34d76679-9d74-4870-ab4a-7b98558b8e00-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.748521 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34d76679-9d74-4870-ab4a-7b98558b8e00-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.748597 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34d76679-9d74-4870-ab4a-7b98558b8e00-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.764550 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-twvvx" podStartSLOduration=71.764533273 podStartE2EDuration="1m11.764533273s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:59.763529531 +0000 UTC m=+91.744853435" watchObservedRunningTime="2025-09-29 09:30:59.764533273 +0000 UTC m=+91.745857177" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.780017 
4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=72.780000619 podStartE2EDuration="1m12.780000619s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:59.779293177 +0000 UTC m=+91.760617111" watchObservedRunningTime="2025-09-29 09:30:59.780000619 +0000 UTC m=+91.761324523" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.790388 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=10.790370245 podStartE2EDuration="10.790370245s" podCreationTimestamp="2025-09-29 09:30:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:30:59.78955523 +0000 UTC m=+91.770879134" watchObservedRunningTime="2025-09-29 09:30:59.790370245 +0000 UTC m=+91.771694159" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.849124 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34d76679-9d74-4870-ab4a-7b98558b8e00-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.849190 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/34d76679-9d74-4870-ab4a-7b98558b8e00-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.849220 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/34d76679-9d74-4870-ab4a-7b98558b8e00-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.849248 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/34d76679-9d74-4870-ab4a-7b98558b8e00-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.849276 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34d76679-9d74-4870-ab4a-7b98558b8e00-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.849354 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/34d76679-9d74-4870-ab4a-7b98558b8e00-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" 
(UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.849354 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/34d76679-9d74-4870-ab4a-7b98558b8e00-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.850277 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/34d76679-9d74-4870-ab4a-7b98558b8e00-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.854466 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34d76679-9d74-4870-ab4a-7b98558b8e00-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.870169 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34d76679-9d74-4870-ab4a-7b98558b8e00-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4nld9\" (UID: \"34d76679-9d74-4870-ab4a-7b98558b8e00\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: I0929 09:30:59.982161 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" Sep 29 09:30:59 crc kubenswrapper[4779]: W0929 09:30:59.993433 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34d76679_9d74_4870_ab4a_7b98558b8e00.slice/crio-3c7bcc011dabecfce776140137589de93e14fed7df528770b21a01bcb2bedbd4 WatchSource:0}: Error finding container 3c7bcc011dabecfce776140137589de93e14fed7df528770b21a01bcb2bedbd4: Status 404 returned error can't find the container with id 3c7bcc011dabecfce776140137589de93e14fed7df528770b21a01bcb2bedbd4 Sep 29 09:31:00 crc kubenswrapper[4779]: I0929 09:31:00.232115 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" event={"ID":"34d76679-9d74-4870-ab4a-7b98558b8e00","Type":"ContainerStarted","Data":"618dd6714d2d66e1b298997752980012ee6ae6e9e9ad3d8e49be927877612bbc"} Sep 29 09:31:00 crc kubenswrapper[4779]: I0929 09:31:00.232189 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" event={"ID":"34d76679-9d74-4870-ab4a-7b98558b8e00","Type":"ContainerStarted","Data":"3c7bcc011dabecfce776140137589de93e14fed7df528770b21a01bcb2bedbd4"} Sep 29 09:31:00 crc kubenswrapper[4779]: I0929 09:31:00.713794 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:00 crc kubenswrapper[4779]: I0929 09:31:00.713987 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:00 crc kubenswrapper[4779]: E0929 09:31:00.714109 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:00 crc kubenswrapper[4779]: E0929 09:31:00.714385 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:01 crc kubenswrapper[4779]: I0929 09:31:01.714035 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:01 crc kubenswrapper[4779]: I0929 09:31:01.714039 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:01 crc kubenswrapper[4779]: E0929 09:31:01.714267 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:01 crc kubenswrapper[4779]: E0929 09:31:01.714323 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:02 crc kubenswrapper[4779]: I0929 09:31:02.713802 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:02 crc kubenswrapper[4779]: I0929 09:31:02.713871 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:02 crc kubenswrapper[4779]: E0929 09:31:02.714237 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:02 crc kubenswrapper[4779]: E0929 09:31:02.714299 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:03 crc kubenswrapper[4779]: I0929 09:31:03.713635 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:03 crc kubenswrapper[4779]: E0929 09:31:03.713877 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:03 crc kubenswrapper[4779]: I0929 09:31:03.714159 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:03 crc kubenswrapper[4779]: E0929 09:31:03.715521 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:04 crc kubenswrapper[4779]: I0929 09:31:04.713464 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:04 crc kubenswrapper[4779]: E0929 09:31:04.713690 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:04 crc kubenswrapper[4779]: I0929 09:31:04.713784 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:04 crc kubenswrapper[4779]: E0929 09:31:04.713854 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:05 crc kubenswrapper[4779]: I0929 09:31:05.713466 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:05 crc kubenswrapper[4779]: I0929 09:31:05.713467 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:05 crc kubenswrapper[4779]: E0929 09:31:05.713672 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:05 crc kubenswrapper[4779]: E0929 09:31:05.713807 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:06 crc kubenswrapper[4779]: I0929 09:31:06.012378 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:06 crc kubenswrapper[4779]: E0929 09:31:06.012556 4779 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:31:06 crc kubenswrapper[4779]: E0929 09:31:06.012671 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs podName:294a4484-da93-4c37-9ecf-18f68f4ad64d nodeName:}" failed. No retries permitted until 2025-09-29 09:32:10.012638707 +0000 UTC m=+161.993962651 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs") pod "network-metrics-daemon-qvlbd" (UID: "294a4484-da93-4c37-9ecf-18f68f4ad64d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 09:31:06 crc kubenswrapper[4779]: I0929 09:31:06.713162 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:06 crc kubenswrapper[4779]: I0929 09:31:06.713409 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:06 crc kubenswrapper[4779]: E0929 09:31:06.713418 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:06 crc kubenswrapper[4779]: E0929 09:31:06.713526 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:07 crc kubenswrapper[4779]: I0929 09:31:07.714160 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:07 crc kubenswrapper[4779]: I0929 09:31:07.714283 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:07 crc kubenswrapper[4779]: E0929 09:31:07.714366 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:07 crc kubenswrapper[4779]: E0929 09:31:07.714778 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:08 crc kubenswrapper[4779]: I0929 09:31:08.714816 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:08 crc kubenswrapper[4779]: I0929 09:31:08.714866 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:08 crc kubenswrapper[4779]: E0929 09:31:08.715044 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:08 crc kubenswrapper[4779]: E0929 09:31:08.716429 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:08 crc kubenswrapper[4779]: I0929 09:31:08.716788 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:31:08 crc kubenswrapper[4779]: E0929 09:31:08.717174 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-ncxc4_openshift-ovn-kubernetes(60d71749-dfb5-4095-b11b-b70f1a549b88)\"" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" Sep 29 09:31:09 crc kubenswrapper[4779]: I0929 09:31:09.713670 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:09 crc kubenswrapper[4779]: I0929 09:31:09.713664 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:09 crc kubenswrapper[4779]: E0929 09:31:09.713782 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:09 crc kubenswrapper[4779]: E0929 09:31:09.713948 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:10 crc kubenswrapper[4779]: I0929 09:31:10.713503 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:10 crc kubenswrapper[4779]: I0929 09:31:10.713533 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:10 crc kubenswrapper[4779]: E0929 09:31:10.713768 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:10 crc kubenswrapper[4779]: E0929 09:31:10.713852 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:11 crc kubenswrapper[4779]: I0929 09:31:11.714145 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:11 crc kubenswrapper[4779]: I0929 09:31:11.714195 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:11 crc kubenswrapper[4779]: E0929 09:31:11.714358 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:11 crc kubenswrapper[4779]: E0929 09:31:11.714490 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:12 crc kubenswrapper[4779]: I0929 09:31:12.713802 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:12 crc kubenswrapper[4779]: I0929 09:31:12.713825 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:12 crc kubenswrapper[4779]: E0929 09:31:12.714084 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:12 crc kubenswrapper[4779]: E0929 09:31:12.714321 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:13 crc kubenswrapper[4779]: I0929 09:31:13.714082 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:13 crc kubenswrapper[4779]: I0929 09:31:13.714161 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:13 crc kubenswrapper[4779]: E0929 09:31:13.714224 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:13 crc kubenswrapper[4779]: E0929 09:31:13.714327 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:14 crc kubenswrapper[4779]: I0929 09:31:14.714285 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:14 crc kubenswrapper[4779]: E0929 09:31:14.714472 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:14 crc kubenswrapper[4779]: I0929 09:31:14.714546 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:14 crc kubenswrapper[4779]: E0929 09:31:14.714769 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:15 crc kubenswrapper[4779]: I0929 09:31:15.713724 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:15 crc kubenswrapper[4779]: I0929 09:31:15.713766 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:15 crc kubenswrapper[4779]: E0929 09:31:15.713863 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:15 crc kubenswrapper[4779]: E0929 09:31:15.714181 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:16 crc kubenswrapper[4779]: I0929 09:31:16.715129 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:16 crc kubenswrapper[4779]: E0929 09:31:16.715286 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:16 crc kubenswrapper[4779]: I0929 09:31:16.715779 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:16 crc kubenswrapper[4779]: E0929 09:31:16.715887 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:17 crc kubenswrapper[4779]: I0929 09:31:17.713278 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:17 crc kubenswrapper[4779]: I0929 09:31:17.713329 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:17 crc kubenswrapper[4779]: E0929 09:31:17.713505 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:17 crc kubenswrapper[4779]: E0929 09:31:17.713778 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:18 crc kubenswrapper[4779]: I0929 09:31:18.714159 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:18 crc kubenswrapper[4779]: E0929 09:31:18.716418 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:18 crc kubenswrapper[4779]: I0929 09:31:18.716537 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:18 crc kubenswrapper[4779]: E0929 09:31:18.717087 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:19 crc kubenswrapper[4779]: I0929 09:31:19.713834 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:19 crc kubenswrapper[4779]: I0929 09:31:19.713864 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:19 crc kubenswrapper[4779]: E0929 09:31:19.714055 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:19 crc kubenswrapper[4779]: E0929 09:31:19.714174 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:20 crc kubenswrapper[4779]: I0929 09:31:20.714043 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:20 crc kubenswrapper[4779]: I0929 09:31:20.714317 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:20 crc kubenswrapper[4779]: E0929 09:31:20.714488 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:20 crc kubenswrapper[4779]: E0929 09:31:20.714796 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:21 crc kubenswrapper[4779]: I0929 09:31:21.713335 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:21 crc kubenswrapper[4779]: I0929 09:31:21.713335 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:21 crc kubenswrapper[4779]: E0929 09:31:21.713559 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:21 crc kubenswrapper[4779]: E0929 09:31:21.713648 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.307480 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/1.log" Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.308430 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/0.log" Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.308509 4779 generic.go:334] "Generic (PLEG): container finished" podID="6b0e23f7-a478-48e2-a745-193a90e87553" containerID="10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115" exitCode=1 Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.308550 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2tkr" event={"ID":"6b0e23f7-a478-48e2-a745-193a90e87553","Type":"ContainerDied","Data":"10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115"} Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.308594 4779 scope.go:117] "RemoveContainer" containerID="944ced544427edda31dc5b05053a08a83a71829a896cb6dba002ca20ee3e56e4" Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.309184 4779 scope.go:117] "RemoveContainer" containerID="10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115" Sep 29 09:31:22 crc kubenswrapper[4779]: E0929 09:31:22.309430 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-f2tkr_openshift-multus(6b0e23f7-a478-48e2-a745-193a90e87553)\"" pod="openshift-multus/multus-f2tkr" podUID="6b0e23f7-a478-48e2-a745-193a90e87553" Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.333095 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nld9" podStartSLOduration=94.333073879 podStartE2EDuration="1m34.333073879s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:00.2491981 +0000 UTC m=+92.230522004" watchObservedRunningTime="2025-09-29 09:31:22.333073879 +0000 UTC m=+114.314397813" Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.713402 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.713444 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:22 crc kubenswrapper[4779]: E0929 09:31:22.713595 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:22 crc kubenswrapper[4779]: E0929 09:31:22.713987 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:22 crc kubenswrapper[4779]: I0929 09:31:22.714262 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:31:23 crc kubenswrapper[4779]: I0929 09:31:23.320475 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/3.log" Sep 29 09:31:23 crc kubenswrapper[4779]: I0929 09:31:23.324065 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerStarted","Data":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} Sep 29 09:31:23 crc kubenswrapper[4779]: I0929 09:31:23.324692 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:31:23 crc kubenswrapper[4779]: I0929 09:31:23.325348 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/1.log" Sep 29 09:31:23 crc kubenswrapper[4779]: I0929 09:31:23.691075 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podStartSLOduration=95.69105771 podStartE2EDuration="1m35.69105771s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:23.354614015 +0000 UTC m=+115.335937919" watchObservedRunningTime="2025-09-29 09:31:23.69105771 +0000 UTC m=+115.672381614" Sep 29 09:31:23 crc kubenswrapper[4779]: I0929 09:31:23.691983 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-qvlbd"] Sep 29 09:31:23 crc kubenswrapper[4779]: I0929 09:31:23.692119 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:23 crc kubenswrapper[4779]: E0929 09:31:23.692229 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:23 crc kubenswrapper[4779]: I0929 09:31:23.713813 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:23 crc kubenswrapper[4779]: E0929 09:31:23.713932 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:24 crc kubenswrapper[4779]: I0929 09:31:24.714216 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:24 crc kubenswrapper[4779]: E0929 09:31:24.714740 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:24 crc kubenswrapper[4779]: I0929 09:31:24.714254 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:24 crc kubenswrapper[4779]: E0929 09:31:24.715173 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:25 crc kubenswrapper[4779]: I0929 09:31:25.714062 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:25 crc kubenswrapper[4779]: I0929 09:31:25.714086 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:25 crc kubenswrapper[4779]: E0929 09:31:25.714207 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:25 crc kubenswrapper[4779]: E0929 09:31:25.714327 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:26 crc kubenswrapper[4779]: I0929 09:31:26.714460 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:26 crc kubenswrapper[4779]: I0929 09:31:26.714547 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:26 crc kubenswrapper[4779]: E0929 09:31:26.714730 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:26 crc kubenswrapper[4779]: E0929 09:31:26.714979 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:27 crc kubenswrapper[4779]: I0929 09:31:27.713587 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:27 crc kubenswrapper[4779]: E0929 09:31:27.713699 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:27 crc kubenswrapper[4779]: I0929 09:31:27.713592 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:27 crc kubenswrapper[4779]: E0929 09:31:27.713766 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:28 crc kubenswrapper[4779]: E0929 09:31:28.687821 4779 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 29 09:31:28 crc kubenswrapper[4779]: I0929 09:31:28.713535 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:28 crc kubenswrapper[4779]: E0929 09:31:28.715704 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:28 crc kubenswrapper[4779]: I0929 09:31:28.715937 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:28 crc kubenswrapper[4779]: E0929 09:31:28.716178 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:28 crc kubenswrapper[4779]: E0929 09:31:28.831546 4779 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 09:31:29 crc kubenswrapper[4779]: I0929 09:31:29.714508 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:29 crc kubenswrapper[4779]: E0929 09:31:29.714718 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:29 crc kubenswrapper[4779]: I0929 09:31:29.716090 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:29 crc kubenswrapper[4779]: E0929 09:31:29.716302 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:30 crc kubenswrapper[4779]: I0929 09:31:30.713822 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:30 crc kubenswrapper[4779]: I0929 09:31:30.714155 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:30 crc kubenswrapper[4779]: E0929 09:31:30.715642 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:30 crc kubenswrapper[4779]: E0929 09:31:30.716156 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:31 crc kubenswrapper[4779]: I0929 09:31:31.713818 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:31 crc kubenswrapper[4779]: E0929 09:31:31.714005 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:31 crc kubenswrapper[4779]: I0929 09:31:31.713845 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:31 crc kubenswrapper[4779]: E0929 09:31:31.714635 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:32 crc kubenswrapper[4779]: I0929 09:31:32.713747 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:32 crc kubenswrapper[4779]: I0929 09:31:32.713883 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:32 crc kubenswrapper[4779]: E0929 09:31:32.714076 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:32 crc kubenswrapper[4779]: E0929 09:31:32.714133 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:33 crc kubenswrapper[4779]: I0929 09:31:33.713733 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:33 crc kubenswrapper[4779]: I0929 09:31:33.713804 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:33 crc kubenswrapper[4779]: E0929 09:31:33.713900 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:33 crc kubenswrapper[4779]: E0929 09:31:33.714199 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:33 crc kubenswrapper[4779]: I0929 09:31:33.714651 4779 scope.go:117] "RemoveContainer" containerID="10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115" Sep 29 09:31:33 crc kubenswrapper[4779]: E0929 09:31:33.832936 4779 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 09:31:34 crc kubenswrapper[4779]: I0929 09:31:34.366855 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/1.log" Sep 29 09:31:34 crc kubenswrapper[4779]: I0929 09:31:34.367294 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2tkr" event={"ID":"6b0e23f7-a478-48e2-a745-193a90e87553","Type":"ContainerStarted","Data":"4bb23b9d833a44d610758b9d19cba1ff80274c767464a459a50ef01121718270"} Sep 29 09:31:34 crc kubenswrapper[4779]: I0929 09:31:34.714281 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:34 crc kubenswrapper[4779]: I0929 09:31:34.714292 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:34 crc kubenswrapper[4779]: E0929 09:31:34.714771 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:34 crc kubenswrapper[4779]: E0929 09:31:34.715013 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:35 crc kubenswrapper[4779]: I0929 09:31:35.713434 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:35 crc kubenswrapper[4779]: E0929 09:31:35.713601 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:35 crc kubenswrapper[4779]: I0929 09:31:35.713435 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:35 crc kubenswrapper[4779]: E0929 09:31:35.713734 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:36 crc kubenswrapper[4779]: I0929 09:31:36.713699 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:36 crc kubenswrapper[4779]: I0929 09:31:36.713809 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:36 crc kubenswrapper[4779]: E0929 09:31:36.713966 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:36 crc kubenswrapper[4779]: E0929 09:31:36.714166 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:37 crc kubenswrapper[4779]: I0929 09:31:37.713885 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:37 crc kubenswrapper[4779]: I0929 09:31:37.713964 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:37 crc kubenswrapper[4779]: E0929 09:31:37.714038 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 09:31:37 crc kubenswrapper[4779]: E0929 09:31:37.714220 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvlbd" podUID="294a4484-da93-4c37-9ecf-18f68f4ad64d" Sep 29 09:31:38 crc kubenswrapper[4779]: I0929 09:31:38.715051 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:38 crc kubenswrapper[4779]: E0929 09:31:38.715783 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 09:31:38 crc kubenswrapper[4779]: I0929 09:31:38.715843 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:38 crc kubenswrapper[4779]: E0929 09:31:38.716069 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 09:31:39 crc kubenswrapper[4779]: I0929 09:31:39.714058 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd" Sep 29 09:31:39 crc kubenswrapper[4779]: I0929 09:31:39.714718 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:39 crc kubenswrapper[4779]: I0929 09:31:39.716845 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 29 09:31:39 crc kubenswrapper[4779]: I0929 09:31:39.717022 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 29 09:31:39 crc kubenswrapper[4779]: I0929 09:31:39.717448 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 29 09:31:39 crc kubenswrapper[4779]: I0929 09:31:39.718243 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.463770 4779 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.517313 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-j6lzv"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.518159 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.521024 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.521688 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.523567 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.525618 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.526861 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zhwns"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.528176 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fq82t"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.529314 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.529983 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dvvh4"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.530493 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.537017 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.543068 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.543684 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.544289 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.545034 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.545153 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.545284 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.545314 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.545482 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.545536 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.545675 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.549357 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.550486 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.550720 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.550980 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.551403 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.551618 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.551899 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.552056 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.552146 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.552494 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.552647 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 
29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.552955 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.553857 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.554047 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.555578 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.555874 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.555998 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.556094 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.556145 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.556331 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.556612 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.561855 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4sjxm"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.562429 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-v6825"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.563051 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.563549 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.563949 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.564657 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.570076 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.570688 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.577527 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wbk58"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.578311 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.581864 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.582068 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.582162 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.582271 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.583194 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.592887 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.593332 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.593355 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.593748 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.593993 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.594107 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.594892 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.592889 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.595608 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.595657 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.595924 4779 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.598463 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.600979 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.604853 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.605239 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.605386 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.605763 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.605936 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.606014 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.606108 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.606548 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.606752 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.606820 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.606881 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.607147 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.607285 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-m9sp2"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.607306 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.607340 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.607681 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 29 09:31:40 crc 
kubenswrapper[4779]: I0929 09:31:40.607703 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.608243 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.608312 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.611531 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zfpqh"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.612540 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-qpmlh"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.612698 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c95dt\" (UniqueName: \"kubernetes.io/projected/edc8c507-aa54-4f0e-b64f-265ff1860ca0-kube-api-access-c95dt\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.612771 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edc8c507-aa54-4f0e-b64f-265ff1860ca0-config\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.612806 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-encryption-config\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.612862 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-encryption-config\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.612896 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/edc8c507-aa54-4f0e-b64f-265ff1860ca0-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.612952 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-image-import-ca\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 
09:31:40.612985 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66b2a431-ff77-457e-99aa-d4dd40bc4640-serving-cert\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613027 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-serving-cert\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613060 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613093 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n7pb\" (UniqueName: \"kubernetes.io/projected/4a3ce3bd-7194-4b10-9788-2b570b1814c0-kube-api-access-9n7pb\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613123 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-etcd-client\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613153 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a3ce3bd-7194-4b10-9788-2b570b1814c0-audit-dir\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613201 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613234 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngnl4\" (UniqueName: \"kubernetes.io/projected/66b2a431-ff77-457e-99aa-d4dd40bc4640-kube-api-access-ngnl4\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613265 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613307 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-serving-cert\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613301 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-qpmlh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613338 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f420d1df-5091-46b0-be80-9a83e5be1a65-audit-dir\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613387 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-audit-policies\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613420 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f420d1df-5091-46b0-be80-9a83e5be1a65-node-pullsecrets\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613450 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-etcd-serving-ca\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613486 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613520 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/edc8c507-aa54-4f0e-b64f-265ff1860ca0-images\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613550 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-config\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613581 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-config\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613627 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-audit\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613658 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttg5x\" (UniqueName: \"kubernetes.io/projected/f420d1df-5091-46b0-be80-9a83e5be1a65-kube-api-access-ttg5x\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613688 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-client-ca\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.613731 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-etcd-client\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.614288 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.615144 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.619282 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.619685 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.619785 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.619844 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.619997 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.626178 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.626529 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.627823 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.628403 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.628804 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.629000 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.629337 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.632095 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.632261 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.633871 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.634497 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-8gbfr"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.634945 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-dnrj5"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.637510 4779 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.637657 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.638026 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.638604 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.640154 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.642203 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.642212 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.642405 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.643972 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.644478 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.644597 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.644744 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.644496 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.644943 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.647561 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.647767 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-f4jjc"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.648480 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.667436 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.668628 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.670688 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.670970 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.671612 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.671974 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.672117 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.672288 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.672536 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.676223 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.694623 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.694859 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rzc4j"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.695307 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.695386 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.696270 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.697335 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.697494 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.697948 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-j6lzv"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.698109 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.698722 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.701461 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.701504 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.702275 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.703698 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.704964 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.705343 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-djlds"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.706177 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.706861 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.706986 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.706988 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.707951 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.709974 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.710938 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.711096 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.711031 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.711748 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.712449 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.712943 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.713348 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.713543 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714144 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp5tn\" (UniqueName: \"kubernetes.io/projected/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-kube-api-access-rp5tn\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714174 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-config\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714196 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7zdb\" (UniqueName: \"kubernetes.io/projected/67e6a9c5-a695-4683-9df7-cab5bace357c-kube-api-access-x7zdb\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714218 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714240 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/edc8c507-aa54-4f0e-b64f-265ff1860ca0-images\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714263 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5trpn\" (UniqueName: \"kubernetes.io/projected/2bdac139-9c3d-4490-b350-b0513395a281-kube-api-access-5trpn\") pod \"cluster-samples-operator-665b6dd947-xqgjv\" (UID: \"2bdac139-9c3d-4490-b350-b0513395a281\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 
09:31:40.714283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-config\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714303 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-ca\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714322 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/67e6a9c5-a695-4683-9df7-cab5bace357c-auth-proxy-config\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714337 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-service-ca\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714353 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714368 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714384 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-serving-cert\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714405 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-config\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714424 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzcqn\" (UniqueName: 
\"kubernetes.io/projected/4872deec-335d-4079-8981-1db7ef98b710-kube-api-access-nzcqn\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714439 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714456 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-config\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714482 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ngf9\" (UniqueName: \"kubernetes.io/projected/61a2958b-98ca-4304-956a-5995bdda407d-kube-api-access-5ngf9\") pod \"dns-operator-744455d44c-zfpqh\" (UID: \"61a2958b-98ca-4304-956a-5995bdda407d\") " pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714497 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23db3d43-22c2-4425-944c-201c682f382d-serving-cert\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714511 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e6a9c5-a695-4683-9df7-cab5bace357c-config\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714525 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714546 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-audit\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714562 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttg5x\" (UniqueName: 
\"kubernetes.io/projected/f420d1df-5091-46b0-be80-9a83e5be1a65-kube-api-access-ttg5x\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714578 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-client-ca\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714593 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-etcd-client\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714610 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714628 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714648 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714702 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-oauth-serving-cert\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714723 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-trusted-ca-bundle\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714738 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-trusted-ca-bundle\") pod 
\"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714752 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61a2958b-98ca-4304-956a-5995bdda407d-metrics-tls\") pod \"dns-operator-744455d44c-zfpqh\" (UID: \"61a2958b-98ca-4304-956a-5995bdda407d\") " pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714770 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714787 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714803 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c95dt\" (UniqueName: \"kubernetes.io/projected/edc8c507-aa54-4f0e-b64f-265ff1860ca0-kube-api-access-c95dt\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714847 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6dqt\" (UniqueName: \"kubernetes.io/projected/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-kube-api-access-h6dqt\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714864 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-serving-cert\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714882 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edc8c507-aa54-4f0e-b64f-265ff1860ca0-config\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714917 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-config\") pod 
\"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714941 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjshw\" (UniqueName: \"kubernetes.io/projected/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-kube-api-access-vjshw\") pod \"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.714984 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-encryption-config\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715003 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4872deec-335d-4079-8981-1db7ef98b710-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715019 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715036 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-service-ca-bundle\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715057 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/67e6a9c5-a695-4683-9df7-cab5bace357c-machine-approver-tls\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715083 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2bdac139-9c3d-4490-b350-b0513395a281-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-xqgjv\" (UID: \"2bdac139-9c3d-4490-b350-b0513395a281\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715102 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c01a0586-dcf7-41cb-a48f-a593e4436d7a-serving-cert\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715120 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkmkr\" (UniqueName: \"kubernetes.io/projected/c01a0586-dcf7-41cb-a48f-a593e4436d7a-kube-api-access-tkmkr\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715145 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-encryption-config\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715166 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/edc8c507-aa54-4f0e-b64f-265ff1860ca0-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715183 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e537e250-466e-4019-a6bf-57b8301b954e-trusted-ca\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715198 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-serving-cert\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715195 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/edc8c507-aa54-4f0e-b64f-265ff1860ca0-images\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715215 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-image-import-ca\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66b2a431-ff77-457e-99aa-d4dd40bc4640-serving-cert\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 
09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715353 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715567 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-dir\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715591 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-oauth-config\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715615 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-serving-cert\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715637 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715661 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n7pb\" (UniqueName: \"kubernetes.io/projected/4a3ce3bd-7194-4b10-9788-2b570b1814c0-kube-api-access-9n7pb\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715696 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-etcd-client\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715718 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a3ce3bd-7194-4b10-9788-2b570b1814c0-audit-dir\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715754 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-idp-0-file-data\") pod 
\"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715775 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-client-ca\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715795 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4872deec-335d-4079-8981-1db7ef98b710-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715815 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjvcq\" (UniqueName: \"kubernetes.io/projected/b61fd91c-c774-44fd-9d5e-114aa59a1b39-kube-api-access-wjvcq\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715841 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.715865 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngnl4\" (UniqueName: \"kubernetes.io/projected/66b2a431-ff77-457e-99aa-d4dd40bc4640-kube-api-access-ngnl4\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.716000 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-config\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.716026 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-config\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.716466 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a3ce3bd-7194-4b10-9788-2b570b1814c0-audit-dir\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.716932 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-audit\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.717139 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edc8c507-aa54-4f0e-b64f-265ff1860ca0-config\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.717463 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-image-import-ca\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.718991 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719048 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-service-ca\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719121 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e537e250-466e-4019-a6bf-57b8301b954e-serving-cert\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719162 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2xkn\" (UniqueName: \"kubernetes.io/projected/d48d8d73-7021-4ebd-8321-b7f73e330c1c-kube-api-access-b2xkn\") pod \"downloads-7954f5f757-qpmlh\" (UID: \"d48d8d73-7021-4ebd-8321-b7f73e330c1c\") " pod="openshift-console/downloads-7954f5f757-qpmlh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719227 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-serving-cert\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719278 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f420d1df-5091-46b0-be80-9a83e5be1a65-audit-dir\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " 
pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719311 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719322 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719388 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f420d1df-5091-46b0-be80-9a83e5be1a65-audit-dir\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.719389 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgnqn\" (UniqueName: \"kubernetes.io/projected/23db3d43-22c2-4425-944c-201c682f382d-kube-api-access-rgnqn\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.720058 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721385 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrvjs\" (UniqueName: \"kubernetes.io/projected/e537e250-466e-4019-a6bf-57b8301b954e-kube-api-access-lrvjs\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721427 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp25w\" (UniqueName: \"kubernetes.io/projected/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-kube-api-access-hp25w\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721455 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-audit-policies\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721473 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f420d1df-5091-46b0-be80-9a83e5be1a65-node-pullsecrets\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721491 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4872deec-335d-4079-8981-1db7ef98b710-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721550 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721568 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-etcd-serving-ca\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721586 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e537e250-466e-4019-a6bf-57b8301b954e-config\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721601 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721618 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-client\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721635 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-config\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721653 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-policies\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.721761 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-serving-cert\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.722073 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-encryption-config\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.722273 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-serving-cert\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.722361 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.722946 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f420d1df-5091-46b0-be80-9a83e5be1a65-node-pullsecrets\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.722973 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f420d1df-5091-46b0-be80-9a83e5be1a65-etcd-serving-ca\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.723003 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-encryption-config\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.724044 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-client-ca\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.724641 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/edc8c507-aa54-4f0e-b64f-265ff1860ca0-machine-api-operator-tls\") pod 
\"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.725281 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.725726 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f420d1df-5091-46b0-be80-9a83e5be1a65-etcd-client\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.727576 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.727673 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a3ce3bd-7194-4b10-9788-2b570b1814c0-audit-policies\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.738251 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4a3ce3bd-7194-4b10-9788-2b570b1814c0-etcd-client\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.746379 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66b2a431-ff77-457e-99aa-d4dd40bc4640-serving-cert\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.748228 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.759799 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.761080 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sfkpc"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.761235 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.762082 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.762233 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.762301 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.763691 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dkmtc"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.763779 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.765050 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-blxvw"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.765429 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.765454 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.765467 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.765817 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.766365 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.766387 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.766716 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dvvh4"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.766739 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.766750 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-m9sp2"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.766759 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zhwns"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.766769 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-jx6p6"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.768065 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8gbfr"] Sep 29 09:31:40 crc 
kubenswrapper[4779]: I0929 09:31:40.768222 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zfpqh"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.768237 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rzc4j"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.768284 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fq82t"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.768296 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.768800 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-blxvw" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.768957 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.768964 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.769072 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.769293 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-jx6p6" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.769331 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.769370 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.770001 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.771046 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.772033 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.773074 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.774073 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wbk58"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.775089 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.776142 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4sjxm"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.777247 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-qpmlh"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.778379 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.779470 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-dnrj5"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.780013 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.780454 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.781488 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sfkpc"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.782439 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-xhrzk"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.782985 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xhrzk" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.783443 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vkr5d"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.784493 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.784611 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-vkr5d" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.785432 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.786486 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-djlds"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.787534 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-jx6p6"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.788619 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dkmtc"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.789631 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.790655 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.791642 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.792689 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.793638 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xhrzk"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.794585 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.795530 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vkr5d"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.796452 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-blxvw"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.797416 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-sp4jc"] Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.797874 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-sp4jc" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.820680 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.822731 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e537e250-466e-4019-a6bf-57b8301b954e-trusted-ca\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.822755 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c01a0586-dcf7-41cb-a48f-a593e4436d7a-serving-cert\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.822776 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkmkr\" (UniqueName: \"kubernetes.io/projected/c01a0586-dcf7-41cb-a48f-a593e4436d7a-kube-api-access-tkmkr\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824051 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e537e250-466e-4019-a6bf-57b8301b954e-trusted-ca\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824110 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-serving-cert\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824135 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-dir\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824155 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-oauth-config\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824212 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb9dg\" (UniqueName: \"kubernetes.io/projected/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-kube-api-access-nb9dg\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824236 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/23c99006-27db-445e-ae39-365ca63ae52e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824260 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824280 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-client-ca\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824362 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4872deec-335d-4079-8981-1db7ef98b710-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824391 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjvcq\" (UniqueName: \"kubernetes.io/projected/b61fd91c-c774-44fd-9d5e-114aa59a1b39-kube-api-access-wjvcq\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824417 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-webhook-cert\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824439 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b2287b55-49f7-492d-a256-d300d4fee9c8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824472 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-service-ca\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824494 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e537e250-466e-4019-a6bf-57b8301b954e-serving-cert\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2xkn\" (UniqueName: \"kubernetes.io/projected/d48d8d73-7021-4ebd-8321-b7f73e330c1c-kube-api-access-b2xkn\") pod \"downloads-7954f5f757-qpmlh\" (UID: \"d48d8d73-7021-4ebd-8321-b7f73e330c1c\") " pod="openshift-console/downloads-7954f5f757-qpmlh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824580 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824619 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgnqn\" (UniqueName: \"kubernetes.io/projected/23db3d43-22c2-4425-944c-201c682f382d-kube-api-access-rgnqn\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824642 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrvjs\" (UniqueName: \"kubernetes.io/projected/e537e250-466e-4019-a6bf-57b8301b954e-kube-api-access-lrvjs\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824665 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp25w\" (UniqueName: \"kubernetes.io/projected/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-kube-api-access-hp25w\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824691 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824713 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-tmpfs\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc 
kubenswrapper[4779]: I0929 09:31:40.824739 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4872deec-335d-4079-8981-1db7ef98b710-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824762 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824799 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e537e250-466e-4019-a6bf-57b8301b954e-config\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824824 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824846 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-policies\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824867 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-client\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824886 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-config\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824938 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp5tn\" (UniqueName: \"kubernetes.io/projected/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-kube-api-access-rp5tn\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824958 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-config\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.824979 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7zdb\" (UniqueName: \"kubernetes.io/projected/67e6a9c5-a695-4683-9df7-cab5bace357c-kube-api-access-x7zdb\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825001 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57fml\" (UniqueName: \"kubernetes.io/projected/997b01e5-23d3-47fe-953f-ca0100bdb0b6-kube-api-access-57fml\") pod \"migrator-59844c95c7-tcl8h\" (UID: \"997b01e5-23d3-47fe-953f-ca0100bdb0b6\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825037 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825132 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-dir\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825419 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5trpn\" (UniqueName: \"kubernetes.io/projected/2bdac139-9c3d-4490-b350-b0513395a281-kube-api-access-5trpn\") pod \"cluster-samples-operator-665b6dd947-xqgjv\" (UID: \"2bdac139-9c3d-4490-b350-b0513395a281\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825449 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-apiservice-cert\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825472 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-service-ca\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825493 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-serving-cert\") pod 
\"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825514 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-images\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825536 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-ca\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/67e6a9c5-a695-4683-9df7-cab5bace357c-auth-proxy-config\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825611 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825632 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-serving-cert\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825653 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825673 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-config\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825695 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldl7z\" (UniqueName: \"kubernetes.io/projected/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-kube-api-access-ldl7z\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: 
\"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825715 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d8dh\" (UniqueName: \"kubernetes.io/projected/23c99006-27db-445e-ae39-365ca63ae52e-kube-api-access-4d8dh\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825739 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzcqn\" (UniqueName: \"kubernetes.io/projected/4872deec-335d-4079-8981-1db7ef98b710-kube-api-access-nzcqn\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825761 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-proxy-tls\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825798 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ngf9\" (UniqueName: \"kubernetes.io/projected/61a2958b-98ca-4304-956a-5995bdda407d-kube-api-access-5ngf9\") pod \"dns-operator-744455d44c-zfpqh\" (UID: \"61a2958b-98ca-4304-956a-5995bdda407d\") " pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825820 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23db3d43-22c2-4425-944c-201c682f382d-serving-cert\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825849 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e6a9c5-a695-4683-9df7-cab5bace357c-config\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825870 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825931 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj6xn\" (UniqueName: \"kubernetes.io/projected/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-kube-api-access-dj6xn\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825956 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.825979 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826002 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826021 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-oauth-serving-cert\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826042 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-trusted-ca-bundle\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826063 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826081 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61a2958b-98ca-4304-956a-5995bdda407d-metrics-tls\") pod \"dns-operator-744455d44c-zfpqh\" (UID: \"61a2958b-98ca-4304-956a-5995bdda407d\") " pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826101 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826124 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826153 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/23c99006-27db-445e-ae39-365ca63ae52e-srv-cert\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826177 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6dqt\" (UniqueName: \"kubernetes.io/projected/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-kube-api-access-h6dqt\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826199 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-serving-cert\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826221 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2287b55-49f7-492d-a256-d300d4fee9c8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826277 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-proxy-tls\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826301 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4872deec-335d-4079-8981-1db7ef98b710-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826324 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826344 
4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-config\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826365 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjshw\" (UniqueName: \"kubernetes.io/projected/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-kube-api-access-vjshw\") pod \"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826400 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-service-ca-bundle\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826420 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2287b55-49f7-492d-a256-d300d4fee9c8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826442 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/67e6a9c5-a695-4683-9df7-cab5bace357c-machine-approver-tls\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.826463 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2bdac139-9c3d-4490-b350-b0513395a281-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-xqgjv\" (UID: \"2bdac139-9c3d-4490-b350-b0513395a281\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.827680 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-serving-cert\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.827775 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-client-ca\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.828036 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.828322 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4872deec-335d-4079-8981-1db7ef98b710-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.828392 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e6a9c5-a695-4683-9df7-cab5bace357c-config\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.828552 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.828854 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-service-ca\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.829048 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-service-ca\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.830019 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61a2958b-98ca-4304-956a-5995bdda407d-metrics-tls\") pod \"dns-operator-744455d44c-zfpqh\" (UID: \"61a2958b-98ca-4304-956a-5995bdda407d\") " pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.830681 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.830827 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.830952 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4872deec-335d-4079-8981-1db7ef98b710-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.831158 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.831158 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c01a0586-dcf7-41cb-a48f-a593e4436d7a-serving-cert\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.831374 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.831421 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.831622 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e537e250-466e-4019-a6bf-57b8301b954e-serving-cert\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.831650 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.832557 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.832566 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2bdac139-9c3d-4490-b350-b0513395a281-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-xqgjv\" (UID: \"2bdac139-9c3d-4490-b350-b0513395a281\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.833036 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-config\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.833401 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-oauth-serving-cert\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.833579 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e537e250-466e-4019-a6bf-57b8301b954e-config\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.833712 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-config\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.834010 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.834139 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-service-ca-bundle\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.837095 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-config\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.837107 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-policies\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.836364 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-ca\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.836474 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-trusted-ca-bundle\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.836747 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-serving-cert\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.836835 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23db3d43-22c2-4425-944c-201c682f382d-config\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.836890 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.837070 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/67e6a9c5-a695-4683-9df7-cab5bace357c-auth-proxy-config\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.834229 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23db3d43-22c2-4425-944c-201c682f382d-serving-cert\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.837688 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.838038 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.838536 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-serving-cert\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.842491 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-oauth-config\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.843535 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.845190 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/67e6a9c5-a695-4683-9df7-cab5bace357c-machine-approver-tls\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.845955 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c01a0586-dcf7-41cb-a48f-a593e4436d7a-etcd-client\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.845961 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.846138 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.860026 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.880270 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.900402 4779 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.919629 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927465 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927498 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57fml\" (UniqueName: \"kubernetes.io/projected/997b01e5-23d3-47fe-953f-ca0100bdb0b6-kube-api-access-57fml\") pod \"migrator-59844c95c7-tcl8h\" (UID: \"997b01e5-23d3-47fe-953f-ca0100bdb0b6\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927522 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-apiservice-cert\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927551 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-images\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927578 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldl7z\" (UniqueName: \"kubernetes.io/projected/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-kube-api-access-ldl7z\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927598 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d8dh\" (UniqueName: \"kubernetes.io/projected/23c99006-27db-445e-ae39-365ca63ae52e-kube-api-access-4d8dh\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927645 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-proxy-tls\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927684 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj6xn\" (UniqueName: \"kubernetes.io/projected/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-kube-api-access-dj6xn\") 
pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927719 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/23c99006-27db-445e-ae39-365ca63ae52e-srv-cert\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927748 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2287b55-49f7-492d-a256-d300d4fee9c8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927768 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-proxy-tls\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927816 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2287b55-49f7-492d-a256-d300d4fee9c8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927859 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb9dg\" (UniqueName: \"kubernetes.io/projected/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-kube-api-access-nb9dg\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927882 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/23c99006-27db-445e-ae39-365ca63ae52e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927954 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-webhook-cert\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.927986 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b2287b55-49f7-492d-a256-d300d4fee9c8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.928035 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.928056 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-tmpfs\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.928278 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.928508 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-tmpfs\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.929050 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.940578 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.960162 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.980054 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 29 09:31:40 crc kubenswrapper[4779]: I0929 09:31:40.999818 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.006791 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.020734 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.040878 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.060128 4779 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.081076 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.101105 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.120934 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.140672 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.160871 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.179611 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.200775 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.221057 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.234007 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2287b55-49f7-492d-a256-d300d4fee9c8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.240379 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.250282 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2287b55-49f7-492d-a256-d300d4fee9c8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.260296 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.280007 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.300629 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.321287 4779 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.348321 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.360053 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.381962 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.401157 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.420803 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.442020 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.461081 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.480741 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.492873 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-proxy-tls\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.501386 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.508564 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-images\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.520429 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.540374 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.561271 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.572349 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-proxy-tls\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: 
\"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.581397 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.593041 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-apiservice-cert\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.594307 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-webhook-cert\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.601644 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.621871 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.641293 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.660971 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.672748 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/23c99006-27db-445e-ae39-365ca63ae52e-srv-cert\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.682278 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.691403 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/23c99006-27db-445e-ae39-365ca63ae52e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.700991 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.718722 4779 request.go:700] Waited for 1.005108118s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-diagnostics/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.720971 
4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.741163 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.782158 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c95dt\" (UniqueName: \"kubernetes.io/projected/edc8c507-aa54-4f0e-b64f-265ff1860ca0-kube-api-access-c95dt\") pod \"machine-api-operator-5694c8668f-fq82t\" (UID: \"edc8c507-aa54-4f0e-b64f-265ff1860ca0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.806564 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttg5x\" (UniqueName: \"kubernetes.io/projected/f420d1df-5091-46b0-be80-9a83e5be1a65-kube-api-access-ttg5x\") pod \"apiserver-76f77b778f-zhwns\" (UID: \"f420d1df-5091-46b0-be80-9a83e5be1a65\") " pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.822535 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.844742 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n7pb\" (UniqueName: \"kubernetes.io/projected/4a3ce3bd-7194-4b10-9788-2b570b1814c0-kube-api-access-9n7pb\") pod \"apiserver-7bbb656c7d-44nv6\" (UID: \"4a3ce3bd-7194-4b10-9788-2b570b1814c0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.850837 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngnl4\" (UniqueName: \"kubernetes.io/projected/66b2a431-ff77-457e-99aa-d4dd40bc4640-kube-api-access-ngnl4\") pod \"controller-manager-879f6c89f-j6lzv\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.861092 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.865430 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.881035 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.901143 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.920338 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.941201 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.962776 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 29 09:31:41 crc kubenswrapper[4779]: I0929 09:31:41.980732 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.001388 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.020994 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.041679 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.041848 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zhwns"] Sep 29 09:31:42 crc kubenswrapper[4779]: W0929 09:31:42.059966 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf420d1df_5091_46b0_be80_9a83e5be1a65.slice/crio-984a993d3caad083d0ca9c6949c54d9da3fddae1b81ae579f8b6a353aca254cf WatchSource:0}: Error finding container 984a993d3caad083d0ca9c6949c54d9da3fddae1b81ae579f8b6a353aca254cf: Status 404 returned error can't find the container with id 984a993d3caad083d0ca9c6949c54d9da3fddae1b81ae579f8b6a353aca254cf Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.061468 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.062639 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.080287 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.100012 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.106359 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.121087 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.144802 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.161339 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.181390 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.201128 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.217232 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-j6lzv"] Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.220161 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: W0929 09:31:42.230983 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66b2a431_ff77_457e_99aa_d4dd40bc4640.slice/crio-9b6f39a6656ebede26a83c06c747133258dfc361cd89b39ba51c1713ccb0109c WatchSource:0}: Error finding container 9b6f39a6656ebede26a83c06c747133258dfc361cd89b39ba51c1713ccb0109c: Status 404 returned error can't find the container with id 9b6f39a6656ebede26a83c06c747133258dfc361cd89b39ba51c1713ccb0109c Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.240603 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.262861 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6"] Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.263595 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.267262 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fq82t"] Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.280576 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.300230 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.320684 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.340105 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.360084 4779 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.380010 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.396696 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" event={"ID":"66b2a431-ff77-457e-99aa-d4dd40bc4640","Type":"ContainerStarted","Data":"74e5b6fca00eef71d1d5dd2c0d79596d5199414727042fd70d6f950c7b8b694e"} Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.397068 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" event={"ID":"66b2a431-ff77-457e-99aa-d4dd40bc4640","Type":"ContainerStarted","Data":"9b6f39a6656ebede26a83c06c747133258dfc361cd89b39ba51c1713ccb0109c"} Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.397389 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.398763 4779 generic.go:334] "Generic (PLEG): container finished" podID="f420d1df-5091-46b0-be80-9a83e5be1a65" containerID="a09258917f1ada9b5edcd1bbcee1b707ba684dd4c90956d34208b0c7764d50f0" exitCode=0 Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.398810 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" event={"ID":"f420d1df-5091-46b0-be80-9a83e5be1a65","Type":"ContainerDied","Data":"a09258917f1ada9b5edcd1bbcee1b707ba684dd4c90956d34208b0c7764d50f0"} Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.398845 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" event={"ID":"f420d1df-5091-46b0-be80-9a83e5be1a65","Type":"ContainerStarted","Data":"984a993d3caad083d0ca9c6949c54d9da3fddae1b81ae579f8b6a353aca254cf"} Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.399435 4779 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-j6lzv container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.399477 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" podUID="66b2a431-ff77-457e-99aa-d4dd40bc4640" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.400917 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" event={"ID":"edc8c507-aa54-4f0e-b64f-265ff1860ca0","Type":"ContainerStarted","Data":"1f8382c70903b1b6f522020ec782edad3c379f2cf82d0b7a39e984171b4c6ae0"} Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.400958 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" event={"ID":"edc8c507-aa54-4f0e-b64f-265ff1860ca0","Type":"ContainerStarted","Data":"8d5b02ab86958034ea36d31a0e4e1a43ca45ed43e9cb73b69afd4753cff98a98"} Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.402140 4779 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" event={"ID":"4a3ce3bd-7194-4b10-9788-2b570b1814c0","Type":"ContainerStarted","Data":"c98a150c36b766b11a984c3def559683b34a73f044387162db58052ea25ee406"} Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.409193 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.420467 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.440737 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.460423 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.481498 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.500444 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.521150 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.540516 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.561150 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.580197 4779 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.600209 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.621234 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.641565 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.660948 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.679787 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.718950 4779 request.go:700] Waited for 1.895881421s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/serviceaccounts/etcd-operator/token Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.741312 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkmkr\" (UniqueName: 
\"kubernetes.io/projected/c01a0586-dcf7-41cb-a48f-a593e4436d7a-kube-api-access-tkmkr\") pod \"etcd-operator-b45778765-dnrj5\" (UID: \"c01a0586-dcf7-41cb-a48f-a593e4436d7a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.755620 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjvcq\" (UniqueName: \"kubernetes.io/projected/b61fd91c-c774-44fd-9d5e-114aa59a1b39-kube-api-access-wjvcq\") pod \"console-f9d7485db-8gbfr\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.774813 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4872deec-335d-4079-8981-1db7ef98b710-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.798965 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzcqn\" (UniqueName: \"kubernetes.io/projected/4872deec-335d-4079-8981-1db7ef98b710-kube-api-access-nzcqn\") pod \"cluster-image-registry-operator-dc59b4c8b-cscv9\" (UID: \"4872deec-335d-4079-8981-1db7ef98b710\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.815478 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ngf9\" (UniqueName: \"kubernetes.io/projected/61a2958b-98ca-4304-956a-5995bdda407d-kube-api-access-5ngf9\") pod \"dns-operator-744455d44c-zfpqh\" (UID: \"61a2958b-98ca-4304-956a-5995bdda407d\") " pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.834653 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjshw\" (UniqueName: \"kubernetes.io/projected/8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b-kube-api-access-vjshw\") pod \"openshift-apiserver-operator-796bbdcf4f-7zdlq\" (UID: \"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.855612 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5trpn\" (UniqueName: \"kubernetes.io/projected/2bdac139-9c3d-4490-b350-b0513395a281-kube-api-access-5trpn\") pod \"cluster-samples-operator-665b6dd947-xqgjv\" (UID: \"2bdac139-9c3d-4490-b350-b0513395a281\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.868486 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.875539 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6dqt\" (UniqueName: \"kubernetes.io/projected/cd78a50e-e3f8-4518-9b93-d8a8d8a08df9-kube-api-access-h6dqt\") pod \"openshift-config-operator-7777fb866f-wbk58\" (UID: \"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.887715 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.895110 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp25w\" (UniqueName: \"kubernetes.io/projected/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-kube-api-access-hp25w\") pod \"route-controller-manager-6576b87f9c-4m9cx\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.899755 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.908745 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.913679 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.913931 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgnqn\" (UniqueName: \"kubernetes.io/projected/23db3d43-22c2-4425-944c-201c682f382d-kube-api-access-rgnqn\") pod \"authentication-operator-69f744f599-4sjxm\" (UID: \"23db3d43-22c2-4425-944c-201c682f382d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.943352 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7zdb\" (UniqueName: \"kubernetes.io/projected/67e6a9c5-a695-4683-9df7-cab5bace357c-kube-api-access-x7zdb\") pod \"machine-approver-56656f9798-v6825\" (UID: \"67e6a9c5-a695-4683-9df7-cab5bace357c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:42 crc kubenswrapper[4779]: I0929 09:31:42.960292 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrvjs\" (UniqueName: \"kubernetes.io/projected/e537e250-466e-4019-a6bf-57b8301b954e-kube-api-access-lrvjs\") pod \"console-operator-58897d9998-m9sp2\" (UID: \"e537e250-466e-4019-a6bf-57b8301b954e\") " pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.025982 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2xkn\" (UniqueName: \"kubernetes.io/projected/d48d8d73-7021-4ebd-8321-b7f73e330c1c-kube-api-access-b2xkn\") pod \"downloads-7954f5f757-qpmlh\" (UID: \"d48d8d73-7021-4ebd-8321-b7f73e330c1c\") " pod="openshift-console/downloads-7954f5f757-qpmlh" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.029083 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57fml\" (UniqueName: \"kubernetes.io/projected/997b01e5-23d3-47fe-953f-ca0100bdb0b6-kube-api-access-57fml\") pod \"migrator-59844c95c7-tcl8h\" (UID: \"997b01e5-23d3-47fe-953f-ca0100bdb0b6\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.029507 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp5tn\" (UniqueName: \"kubernetes.io/projected/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-kube-api-access-rp5tn\") pod \"oauth-openshift-558db77b4-dvvh4\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.040019 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj6xn\" (UniqueName: \"kubernetes.io/projected/e63abe9a-f86e-4dfa-9ddc-3a141ccff375-kube-api-access-dj6xn\") pod \"packageserver-d55dfcdfc-4xmkd\" (UID: \"e63abe9a-f86e-4dfa-9ddc-3a141ccff375\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.059335 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldl7z\" (UniqueName: \"kubernetes.io/projected/a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a-kube-api-access-ldl7z\") pod \"machine-config-operator-74547568cd-xfn5c\" (UID: \"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.075091 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d8dh\" (UniqueName: \"kubernetes.io/projected/23c99006-27db-445e-ae39-365ca63ae52e-kube-api-access-4d8dh\") pod \"olm-operator-6b444d44fb-b2xpr\" (UID: \"23c99006-27db-445e-ae39-365ca63ae52e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.096193 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.099888 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb9dg\" (UniqueName: \"kubernetes.io/projected/580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f-kube-api-access-nb9dg\") pod \"machine-config-controller-84d6567774-djlds\" (UID: \"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.104059 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.113052 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.126669 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b2287b55-49f7-492d-a256-d300d4fee9c8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-fz89h\" (UID: \"b2287b55-49f7-492d-a256-d300d4fee9c8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.160779 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161156 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpk8n\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-kube-api-access-dpk8n\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161201 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-metrics-certs\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161224 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6161b6da-ce56-4639-be44-28e54239bce0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161270 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5f395791-fcf5-4602-903b-06c24127b40e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161294 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-default-certificate\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161321 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/b9929052-c8a6-48a6-9520-9b8f4dc396e0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bp6p9\" (UID: \"b9929052-c8a6-48a6-9520-9b8f4dc396e0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161345 
4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-srv-cert\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161408 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5f395791-fcf5-4602-903b-06c24127b40e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161441 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-profile-collector-cert\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161466 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-metrics-tls\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161511 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxm2z\" (UniqueName: \"kubernetes.io/projected/8a39cfe4-4366-4e3c-81a1-5a0694840afb-kube-api-access-fxm2z\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161538 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161562 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-registry-tls\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161647 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dss2\" (UniqueName: \"kubernetes.io/projected/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-kube-api-access-4dss2\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161696 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161720 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30cc1270-b838-459f-a925-f971ed08b550-config\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161786 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vm6tf\" (UniqueName: \"kubernetes.io/projected/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-kube-api-access-vm6tf\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161826 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsws8\" (UniqueName: \"kubernetes.io/projected/b9929052-c8a6-48a6-9520-9b8f4dc396e0-kube-api-access-qsws8\") pod \"control-plane-machine-set-operator-78cbb6b69f-bp6p9\" (UID: \"b9929052-c8a6-48a6-9520-9b8f4dc396e0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161864 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-bound-sa-token\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161888 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-stats-auth\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161934 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-trusted-ca\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.161960 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30cc1270-b838-459f-a925-f971ed08b550-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp" Sep 29 09:31:43 crc kubenswrapper[4779]: 
I0929 09:31:43.162029 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp8wt\" (UniqueName: \"kubernetes.io/projected/6161b6da-ce56-4639-be44-28e54239bce0-kube-api-access-zp8wt\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.162087 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-registry-certificates\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.162145 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-trusted-ca\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.162168 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6161b6da-ce56-4639-be44-28e54239bce0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.162193 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a39cfe4-4366-4e3c-81a1-5a0694840afb-service-ca-bundle\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.162219 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30cc1270-b838-459f-a925-f971ed08b550-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp" Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.168271 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:43.668253435 +0000 UTC m=+135.649577339 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.168885 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.174511 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.181324 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.194084 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-qpmlh" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265074 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.265283 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:43.765255798 +0000 UTC m=+135.746579702 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265317 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vm6tf\" (UniqueName: \"kubernetes.io/projected/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-kube-api-access-vm6tf\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265383 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz85s\" (UniqueName: \"kubernetes.io/projected/4d97870e-816a-415f-a3ad-1ccdf24e035f-kube-api-access-jz85s\") pod \"ingress-canary-xhrzk\" (UID: \"4d97870e-816a-415f-a3ad-1ccdf24e035f\") " pod="openshift-ingress-canary/ingress-canary-xhrzk"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265460 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsws8\" (UniqueName: \"kubernetes.io/projected/b9929052-c8a6-48a6-9520-9b8f4dc396e0-kube-api-access-qsws8\") pod \"control-plane-machine-set-operator-78cbb6b69f-bp6p9\" (UID: \"b9929052-c8a6-48a6-9520-9b8f4dc396e0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265490 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265539 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/dc8659b1-2788-498e-bc4c-c294328dde71-signing-key\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265568 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-bound-sa-token\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265588 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-stats-auth\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265637 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-trusted-ca\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265699 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30cc1270-b838-459f-a925-f971ed08b550-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265745 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp8wt\" (UniqueName: \"kubernetes.io/projected/6161b6da-ce56-4639-be44-28e54239bce0-kube-api-access-zp8wt\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265802 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2qpg\" (UniqueName: \"kubernetes.io/projected/5cc982ee-40a1-4ac8-a936-a626ee218633-kube-api-access-d2qpg\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265864 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6x5p\" (UniqueName: \"kubernetes.io/projected/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-kube-api-access-g6x5p\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265938 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce544002-58ff-46a0-a565-291a6dd31673-serving-cert\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.265978 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qwpch\" (UID: \"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266063 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5cc982ee-40a1-4ac8-a936-a626ee218633-config-volume\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266148 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d97870e-816a-415f-a3ad-1ccdf24e035f-cert\") pod \"ingress-canary-xhrzk\" (UID: \"4d97870e-816a-415f-a3ad-1ccdf24e035f\") " pod="openshift-ingress-canary/ingress-canary-xhrzk"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266200 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-registration-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-registry-certificates\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266352 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-trusted-ca\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266589 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6161b6da-ce56-4639-be44-28e54239bce0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266674 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a39cfe4-4366-4e3c-81a1-5a0694840afb-service-ca-bundle\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266826 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ae796-2abb-42c7-bbef-9ddbd496420a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.266979 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30cc1270-b838-459f-a925-f971ed08b550-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.267452 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-certs\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.267472 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv498\" (UniqueName: \"kubernetes.io/projected/cb9c789a-4f39-46ee-8e61-5e39d65daf38-kube-api-access-fv498\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.268581 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a39cfe4-4366-4e3c-81a1-5a0694840afb-service-ca-bundle\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.269576 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-stats-auth\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.269688 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-registry-certificates\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.269875 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6161b6da-ce56-4639-be44-28e54239bce0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.270003 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-trusted-ca\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.270077 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4bc15d8-8992-4e59-b6aa-633433fdce22-config\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.270202 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8fpr\" (UniqueName: \"kubernetes.io/projected/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-kube-api-access-z8fpr\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.270264 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-csi-data-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.270722 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfc67\" (UniqueName: \"kubernetes.io/projected/1c0ae796-2abb-42c7-bbef-9ddbd496420a-kube-api-access-kfc67\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.270988 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpk8n\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-kube-api-access-dpk8n\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.271027 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-metrics-certs\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.271046 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6161b6da-ce56-4639-be44-28e54239bce0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.271121 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5f395791-fcf5-4602-903b-06c24127b40e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.271138 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-default-certificate\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.271205 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/b9929052-c8a6-48a6-9520-9b8f4dc396e0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bp6p9\" (UID: \"b9929052-c8a6-48a6-9520-9b8f4dc396e0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.271278 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-srv-cert\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.272702 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-trusted-ca\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.272843 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5f395791-fcf5-4602-903b-06c24127b40e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.272934 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30cc1270-b838-459f-a925-f971ed08b550-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.273580 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5f395791-fcf5-4602-903b-06c24127b40e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.273827 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-profile-collector-cert\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.273870 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-metrics-tls\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.274091 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/b9929052-c8a6-48a6-9520-9b8f4dc396e0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bp6p9\" (UID: \"b9929052-c8a6-48a6-9520-9b8f4dc396e0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.274288 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxm2z\" (UniqueName: \"kubernetes.io/projected/8a39cfe4-4366-4e3c-81a1-5a0694840afb-kube-api-access-fxm2z\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.275970 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-metrics-certs\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.276095 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.276426 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-srv-cert\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.276709 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.276733 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-registry-tls\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.277446 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtdhr\" (UniqueName: \"kubernetes.io/projected/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-kube-api-access-mtdhr\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.277526 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.277545 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-plugins-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.278002 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5cc982ee-40a1-4ac8-a936-a626ee218633-metrics-tls\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.278193 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-socket-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.279781 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j77mq\" (UniqueName: \"kubernetes.io/projected/b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933-kube-api-access-j77mq\") pod \"package-server-manager-789f6589d5-qwpch\" (UID: \"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.279827 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dss2\" (UniqueName: \"kubernetes.io/projected/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-kube-api-access-4dss2\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.280460 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-metrics-tls\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"
Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.280696 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:43.780680291 +0000 UTC m=+135.762004195 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.281089 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.281254 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30cc1270-b838-459f-a925-f971ed08b550-config\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.281295 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4bc15d8-8992-4e59-b6aa-633433fdce22-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.281677 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/dc8659b1-2788-498e-bc4c-c294328dde71-signing-cabundle\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.281704 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce544002-58ff-46a0-a565-291a6dd31673-config\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.281753 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30cc1270-b838-459f-a925-f971ed08b550-config\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.281805 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-node-bootstrap-token\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282070 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-config-volume\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282154 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xf4m\" (UniqueName: \"kubernetes.io/projected/ce544002-58ff-46a0-a565-291a6dd31673-kube-api-access-6xf4m\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282195 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2e7822b0-05f5-44bc-aeb2-2fa46992e016-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sfkpc\" (UID: \"2e7822b0-05f5-44bc-aeb2-2fa46992e016\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282332 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4bc15d8-8992-4e59-b6aa-633433fdce22-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282351 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-mountpoint-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282387 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt2f8\" (UniqueName: \"kubernetes.io/projected/2e7822b0-05f5-44bc-aeb2-2fa46992e016-kube-api-access-vt2f8\") pod \"multus-admission-controller-857f4d67dd-sfkpc\" (UID: \"2e7822b0-05f5-44bc-aeb2-2fa46992e016\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282443 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkgbf\" (UniqueName: \"kubernetes.io/projected/dc8659b1-2788-498e-bc4c-c294328dde71-kube-api-access-rkgbf\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282660 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ae796-2abb-42c7-bbef-9ddbd496420a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.282687 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-secret-volume\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.283842 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vm6tf\" (UniqueName: \"kubernetes.io/projected/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-kube-api-access-vm6tf\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.284002 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-registry-tls\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.284761 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8a39cfe4-4366-4e3c-81a1-5a0694840afb-default-certificate\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.285252 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-profile-collector-cert\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.290638 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6161b6da-ce56-4639-be44-28e54239bce0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.294443 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5f395791-fcf5-4602-903b-06c24127b40e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.300333 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-bound-sa-token\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.304162 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.312425 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.319578 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30cc1270-b838-459f-a925-f971ed08b550-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-67pmp\" (UID: \"30cc1270-b838-459f-a925-f971ed08b550\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.320715 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.324954 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.334513 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.336648 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dvvh4"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.357721 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp8wt\" (UniqueName: \"kubernetes.io/projected/6161b6da-ce56-4639-be44-28e54239bce0-kube-api-access-zp8wt\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbsrp\" (UID: \"6161b6da-ce56-4639-be44-28e54239bce0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.372793 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zfpqh"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.378986 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384093 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384252 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsws8\" (UniqueName: \"kubernetes.io/projected/b9929052-c8a6-48a6-9520-9b8f4dc396e0-kube-api-access-qsws8\") pod \"control-plane-machine-set-operator-78cbb6b69f-bp6p9\" (UID: \"b9929052-c8a6-48a6-9520-9b8f4dc396e0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384324 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5cc982ee-40a1-4ac8-a936-a626ee218633-config-volume\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.384362 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:43.884340828 +0000 UTC m=+135.865664792 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384392 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d97870e-816a-415f-a3ad-1ccdf24e035f-cert\") pod \"ingress-canary-xhrzk\" (UID: \"4d97870e-816a-415f-a3ad-1ccdf24e035f\") " pod="openshift-ingress-canary/ingress-canary-xhrzk"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384431 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-registration-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384466 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ae796-2abb-42c7-bbef-9ddbd496420a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384492 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-certs\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384514 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv498\" (UniqueName: \"kubernetes.io/projected/cb9c789a-4f39-46ee-8e61-5e39d65daf38-kube-api-access-fv498\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384535 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4bc15d8-8992-4e59-b6aa-633433fdce22-config\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8fpr\" (UniqueName: \"kubernetes.io/projected/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-kube-api-access-z8fpr\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384584 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-csi-data-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384607 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfc67\" (UniqueName: \"kubernetes.io/projected/1c0ae796-2abb-42c7-bbef-9ddbd496420a-kube-api-access-kfc67\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384695 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384722 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtdhr\" (UniqueName: \"kubernetes.io/projected/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-kube-api-access-mtdhr\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384743 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384762 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-plugins-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384785 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5cc982ee-40a1-4ac8-a936-a626ee218633-metrics-tls\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384810 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-socket-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384840 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j77mq\" (UniqueName: \"kubernetes.io/projected/b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933-kube-api-access-j77mq\") pod \"package-server-manager-789f6589d5-qwpch\" (UID: \"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384892 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4bc15d8-8992-4e59-b6aa-633433fdce22-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384896 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-csi-data-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.385280 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-plugins-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.385648 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:43.885637768 +0000 UTC m=+135.866961672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390292 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/dc8659b1-2788-498e-bc4c-c294328dde71-signing-cabundle\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390323 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce544002-58ff-46a0-a565-291a6dd31673-config\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390353 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-node-bootstrap-token\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390382 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-config-volume\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390447 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xf4m\" (UniqueName: \"kubernetes.io/projected/ce544002-58ff-46a0-a565-291a6dd31673-kube-api-access-6xf4m\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390474 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2e7822b0-05f5-44bc-aeb2-2fa46992e016-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sfkpc\" (UID: \"2e7822b0-05f5-44bc-aeb2-2fa46992e016\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390507 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4bc15d8-8992-4e59-b6aa-633433fdce22-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390939 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-mountpoint-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390964 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt2f8\" (UniqueName: \"kubernetes.io/projected/2e7822b0-05f5-44bc-aeb2-2fa46992e016-kube-api-access-vt2f8\") pod \"multus-admission-controller-857f4d67dd-sfkpc\" (UID: \"2e7822b0-05f5-44bc-aeb2-2fa46992e016\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.390994 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkgbf\" (UniqueName: \"kubernetes.io/projected/dc8659b1-2788-498e-bc4c-c294328dde71-kube-api-access-rkgbf\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391016 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-secret-volume\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391045 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ae796-2abb-42c7-bbef-9ddbd496420a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391075 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz85s\" (UniqueName: \"kubernetes.io/projected/4d97870e-816a-415f-a3ad-1ccdf24e035f-kube-api-access-jz85s\") pod \"ingress-canary-xhrzk\" (UID: \"4d97870e-816a-415f-a3ad-1ccdf24e035f\") " pod="openshift-ingress-canary/ingress-canary-xhrzk"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391104 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391128 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/dc8659b1-2788-498e-bc4c-c294328dde71-signing-key\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391167 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2qpg\" (UniqueName: \"kubernetes.io/projected/5cc982ee-40a1-4ac8-a936-a626ee218633-kube-api-access-d2qpg\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391188 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6x5p\" (UniqueName: \"kubernetes.io/projected/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-kube-api-access-g6x5p\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391209 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce544002-58ff-46a0-a565-291a6dd31673-serving-cert\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391233 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qwpch\" (UID: \"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.391546 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-registration-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.392118 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8gbfr"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.392174 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-certs\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.387395 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.392967 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4bc15d8-8992-4e59-b6aa-633433fdce22-config\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.393220 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-socket-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.393400 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d97870e-816a-415f-a3ad-1ccdf24e035f-cert\") pod \"ingress-canary-xhrzk\" (UID: \"4d97870e-816a-415f-a3ad-1ccdf24e035f\") " pod="openshift-ingress-canary/ingress-canary-xhrzk"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.393457 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce544002-58ff-46a0-a565-291a6dd31673-config\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.394335 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/dc8659b1-2788-498e-bc4c-c294328dde71-signing-cabundle\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.384840 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5cc982ee-40a1-4ac8-a936-a626ee218633-config-volume\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.396360 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-mountpoint-dir\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.396661 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-config-volume\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.397930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qwpch\" (UID: \"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.398690 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5cc982ee-40a1-4ac8-a936-a626ee218633-metrics-tls\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.400290 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2e7822b0-05f5-44bc-aeb2-2fa46992e016-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sfkpc\" (UID: \"2e7822b0-05f5-44bc-aeb2-2fa46992e016\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.401032 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ae796-2abb-42c7-bbef-9ddbd496420a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.401377 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/dc8659b1-2788-498e-bc4c-c294328dde71-signing-key\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.403391 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.404946 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce544002-58ff-46a0-a565-291a6dd31673-serving-cert\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.414032 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-node-bootstrap-token\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.414211 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ae796-2abb-42c7-bbef-9ddbd496420a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.429461 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-secret-volume\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.429834 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpk8n\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-kube-api-access-dpk8n\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.430005 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4bc15d8-8992-4e59-b6aa-633433fdce22-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"
Sep
29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.430220 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.430443 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxm2z\" (UniqueName: \"kubernetes.io/projected/8a39cfe4-4366-4e3c-81a1-5a0694840afb-kube-api-access-fxm2z\") pod \"router-default-5444994796-f4jjc\" (UID: \"8a39cfe4-4366-4e3c-81a1-5a0694840afb\") " pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.437416 4779 generic.go:334] "Generic (PLEG): container finished" podID="4a3ce3bd-7194-4b10-9788-2b570b1814c0" containerID="1f673d12f0e5013f675aff05cfb0ade498fee74e117a26dccced7ec7498b722e" exitCode=0 Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.437517 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" event={"ID":"4a3ce3bd-7194-4b10-9788-2b570b1814c0","Type":"ContainerDied","Data":"1f673d12f0e5013f675aff05cfb0ade498fee74e117a26dccced7ec7498b722e"} Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.444293 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9"] Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.448384 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" event={"ID":"f420d1df-5091-46b0-be80-9a83e5be1a65","Type":"ContainerStarted","Data":"44da4ea29e958de0582348fb6c25df474db7315734f0d84a8b49b47a4790f28d"} Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.448431 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" event={"ID":"f420d1df-5091-46b0-be80-9a83e5be1a65","Type":"ContainerStarted","Data":"947f8abb3e8ff76de2f5662a230918fb612421ed4378bf5528229ca82780b454"} Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.456411 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" event={"ID":"67e6a9c5-a695-4683-9df7-cab5bace357c","Type":"ContainerStarted","Data":"d496880a99b9421399da47f8e1ba3eb037ce10d5fba591a4fe1752d1dbf8f9a2"} Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.458876 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dss2\" (UniqueName: \"kubernetes.io/projected/bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a-kube-api-access-4dss2\") pod \"catalog-operator-68c6474976-jngcm\" (UID: \"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.459275 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d47c69e-9ca4-4bd5-a362-3c0051825a7e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jvmch\" (UID: \"3d47c69e-9ca4-4bd5-a362-3c0051825a7e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.461331 4779 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-dnrj5"] Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.468454 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" event={"ID":"edc8c507-aa54-4f0e-b64f-265ff1860ca0","Type":"ContainerStarted","Data":"5dc9f4407ca74fc7d35d205d946e08a4c09f35e40881be2332050a449cb407e6"} Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.487643 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.502300 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.503988 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.003967355 +0000 UTC m=+135.985291259 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.504433 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfc67\" (UniqueName: \"kubernetes.io/projected/1c0ae796-2abb-42c7-bbef-9ddbd496420a-kube-api-access-kfc67\") pod \"kube-storage-version-migrator-operator-b67b599dd-mvxx7\" (UID: \"1c0ae796-2abb-42c7-bbef-9ddbd496420a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.520011 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.529511 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.530555 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtdhr\" (UniqueName: \"kubernetes.io/projected/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-kube-api-access-mtdhr\") pod \"collect-profiles-29318970-52fch\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" Sep 29 09:31:43 crc kubenswrapper[4779]: W0929 09:31:43.538313 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4872deec_335d_4079_8981_1db7ef98b710.slice/crio-ecdf1da39fb9df996fca6ead81e998674179c06237690d1229fffcf3ef357fa6 WatchSource:0}: Error finding container ecdf1da39fb9df996fca6ead81e998674179c06237690d1229fffcf3ef357fa6: Status 404 returned error can't find the container with id ecdf1da39fb9df996fca6ead81e998674179c06237690d1229fffcf3ef357fa6 Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.546455 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv498\" (UniqueName: \"kubernetes.io/projected/cb9c789a-4f39-46ee-8e61-5e39d65daf38-kube-api-access-fv498\") pod \"marketplace-operator-79b997595-dkmtc\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.569778 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.587001 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8fpr\" (UniqueName: \"kubernetes.io/projected/6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903-kube-api-access-z8fpr\") pod \"machine-config-server-sp4jc\" (UID: \"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903\") " pod="openshift-machine-config-operator/machine-config-server-sp4jc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.589546 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j77mq\" (UniqueName: \"kubernetes.io/projected/b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933-kube-api-access-j77mq\") pod \"package-server-manager-789f6589d5-qwpch\" (UID: \"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.591283 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.606279 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.607584 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.609446 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.109423647 +0000 UTC m=+136.090747551 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.611839 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt2f8\" (UniqueName: \"kubernetes.io/projected/2e7822b0-05f5-44bc-aeb2-2fa46992e016-kube-api-access-vt2f8\") pod \"multus-admission-controller-857f4d67dd-sfkpc\" (UID: \"2e7822b0-05f5-44bc-aeb2-2fa46992e016\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.639534 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4bc15d8-8992-4e59-b6aa-633433fdce22-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2tdjp\" (UID: \"c4bc15d8-8992-4e59-b6aa-633433fdce22\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.641005 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.646066 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6x5p\" (UniqueName: \"kubernetes.io/projected/c167fa4c-07a4-4a57-8fc0-a56b59c54ee6-kube-api-access-g6x5p\") pod \"csi-hostpathplugin-vkr5d\" (UID: \"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6\") " pod="hostpath-provisioner/csi-hostpathplugin-vkr5d" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.665564 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2qpg\" (UniqueName: \"kubernetes.io/projected/5cc982ee-40a1-4ac8-a936-a626ee218633-kube-api-access-d2qpg\") pod \"dns-default-jx6p6\" (UID: \"5cc982ee-40a1-4ac8-a936-a626ee218633\") " pod="openshift-dns/dns-default-jx6p6" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.666950 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.671687 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.678970 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.682774 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz85s\" (UniqueName: \"kubernetes.io/projected/4d97870e-816a-415f-a3ad-1ccdf24e035f-kube-api-access-jz85s\") pod \"ingress-canary-xhrzk\" (UID: \"4d97870e-816a-415f-a3ad-1ccdf24e035f\") " pod="openshift-ingress-canary/ingress-canary-xhrzk" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.686941 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.695856 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.697046 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkgbf\" (UniqueName: \"kubernetes.io/projected/dc8659b1-2788-498e-bc4c-c294328dde71-kube-api-access-rkgbf\") pod \"service-ca-9c57cc56f-blxvw\" (UID: \"dc8659b1-2788-498e-bc4c-c294328dde71\") " pod="openshift-service-ca/service-ca-9c57cc56f-blxvw" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.706682 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-jx6p6" Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.708111 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv"] Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.708481 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.708794 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.208769942 +0000 UTC m=+136.190093886 (durationBeforeRetry 500ms). 
Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.708794 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.208769942 +0000 UTC m=+136.190093886 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:43 crc kubenswrapper[4779]: W0929 09:31:43.708794 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a39cfe4_4366_4e3c_81a1_5a0694840afb.slice/crio-270543f4d5158ae498f2874aedd95cfa71a2b3c24e5f64a1368470ef60a6f7e3 WatchSource:0}: Error finding container 270543f4d5158ae498f2874aedd95cfa71a2b3c24e5f64a1368470ef60a6f7e3: Status 404 returned error can't find the container with id 270543f4d5158ae498f2874aedd95cfa71a2b3c24e5f64a1368470ef60a6f7e3
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.713724 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wbk58"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.719330 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.731432 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-blxvw"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.733705 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xf4m\" (UniqueName: \"kubernetes.io/projected/ce544002-58ff-46a0-a565-291a6dd31673-kube-api-access-6xf4m\") pod \"service-ca-operator-777779d784-b8g42\" (UID: \"ce544002-58ff-46a0-a565-291a6dd31673\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.734480 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xhrzk"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.763782 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-vkr5d"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.763946 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-qpmlh"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.768876 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-sp4jc"
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.810420 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.810891 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.310874912 +0000 UTC m=+136.292198816 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.817357 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.914316 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:43 crc kubenswrapper[4779]: E0929 09:31:43.916272 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.416243782 +0000 UTC m=+136.397567686 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.969518 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd"]
Sep 29 09:31:43 crc kubenswrapper[4779]: I0929 09:31:43.969554 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4sjxm"]
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.012243 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.022996 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.023356 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.523341274 +0000 UTC m=+136.504665178 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.048813 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-m9sp2"]
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.098632 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c"]
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.104049 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h"]
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.114994 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-djlds"]
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.124573 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.124880 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.624867116 +0000 UTC m=+136.606191020 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.130694 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr"]
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.135005 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp"]
Sep 29 09:31:44 crc kubenswrapper[4779]: W0929 09:31:44.182260 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod997b01e5_23d3_47fe_953f_ca0100bdb0b6.slice/crio-ec4a74086f1756d3ba68620dec0d4d421437c3dcc32c893ed60dc9a32891a171 WatchSource:0}: Error finding container ec4a74086f1756d3ba68620dec0d4d421437c3dcc32c893ed60dc9a32891a171: Status 404 returned error can't find the container with id ec4a74086f1756d3ba68620dec0d4d421437c3dcc32c893ed60dc9a32891a171
Sep 29 09:31:44 crc kubenswrapper[4779]: W0929 09:31:44.214967 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2287b55_49f7_492d_a256_d300d4fee9c8.slice/crio-0ed04770c1d33e4c588a70331204771e8a3c839fdff3c35a72a97a88ecaf5f5a WatchSource:0}: Error finding container 0ed04770c1d33e4c588a70331204771e8a3c839fdff3c35a72a97a88ecaf5f5a: Status 404 returned error can't find the container with id 0ed04770c1d33e4c588a70331204771e8a3c839fdff3c35a72a97a88ecaf5f5a
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.226495 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.226885 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.726871063 +0000 UTC m=+136.708194967 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.271695 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" podStartSLOduration=117.271674235 podStartE2EDuration="1m57.271674235s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:44.245723701 +0000 UTC m=+136.227047605" watchObservedRunningTime="2025-09-29 09:31:44.271674235 +0000 UTC m=+136.252998129"
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.327074 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.327480 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.827459205 +0000 UTC m=+136.808783109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.429332 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.429637 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:44.929627207 +0000 UTC m=+136.910951101 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.432431 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7"]
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.517205 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" event={"ID":"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c","Type":"ContainerStarted","Data":"591572edb7c09b2f9656067b550b5bf635c3359f5e27806d30fa5b66e11027f1"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.517242 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" event={"ID":"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c","Type":"ContainerStarted","Data":"8090d23a1def5249eacc631662414b03461acb5d1c74ffc75786dc2380099e9a"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.517987 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx"
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.526521 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9"]
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.529674 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.530068 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.030051075 +0000 UTC m=+137.011374979 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.536270 4779 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-4m9cx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.536314 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" podUID="ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.559934 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" event={"ID":"67e6a9c5-a695-4683-9df7-cab5bace357c","Type":"ContainerStarted","Data":"8007bc69fc4df7e0ad6d2f9bfa217568177c4e879748e909a3fdf2278ed66770"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.570240 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8gbfr" event={"ID":"b61fd91c-c774-44fd-9d5e-114aa59a1b39","Type":"ContainerStarted","Data":"5c61308255e5bd53a2f972577493acef5021829db0cf3ecaeed10bc46d7cb337"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.587344 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h" event={"ID":"997b01e5-23d3-47fe-953f-ca0100bdb0b6","Type":"ContainerStarted","Data":"ec4a74086f1756d3ba68620dec0d4d421437c3dcc32c893ed60dc9a32891a171"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.589044 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" event={"ID":"23db3d43-22c2-4425-944c-201c682f382d","Type":"ContainerStarted","Data":"097784def4614089a4fd0277ce4bc1b0c1e7f3b3047881c7821e0aa584c25627"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.590233 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" event={"ID":"2bdac139-9c3d-4490-b350-b0513395a281","Type":"ContainerStarted","Data":"09ada07219830acdd11a479d8060d3250f3267e68e92b0fa9604acc705a72f5a"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.609624 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qpmlh" event={"ID":"d48d8d73-7021-4ebd-8321-b7f73e330c1c","Type":"ContainerStarted","Data":"8e3c20eff902c1178eba5de4503bf3e9c2844cfb6bd03dbfa90ee457456eb6b5"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.619851 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" event={"ID":"b2287b55-49f7-492d-a256-d300d4fee9c8","Type":"ContainerStarted","Data":"0ed04770c1d33e4c588a70331204771e8a3c839fdff3c35a72a97a88ecaf5f5a"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.620951 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" event={"ID":"4872deec-335d-4079-8981-1db7ef98b710","Type":"ContainerStarted","Data":"ecdf1da39fb9df996fca6ead81e998674179c06237690d1229fffcf3ef357fa6"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.627379 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-m9sp2" event={"ID":"e537e250-466e-4019-a6bf-57b8301b954e","Type":"ContainerStarted","Data":"783895da61a0ef1c1fa403bcba1e13c90951ebfc4952520620fd65fe9fce4715"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.632710 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.635989 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.135964961 +0000 UTC m=+137.117288865 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.638514 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" event={"ID":"2118e02f-c7d4-4def-a9f1-ee9d81b6408f","Type":"ContainerStarted","Data":"162576b523ea64489c5927c3c0410b157b078d22c54f230a915704aeaabf49e5"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.638555 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" event={"ID":"2118e02f-c7d4-4def-a9f1-ee9d81b6408f","Type":"ContainerStarted","Data":"9db60f672ded3dac2983ad0c03018c35bd284956c22158a1dee2ab7e47197f94"}
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.638691 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4"
Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.664197 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-f4jjc" event={"ID":"8a39cfe4-4366-4e3c-81a1-5a0694840afb","Type":"ContainerStarted","Data":"270543f4d5158ae498f2874aedd95cfa71a2b3c24e5f64a1368470ef60a6f7e3"}
\"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body= Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.665164 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" podUID="2118e02f-c7d4-4def-a9f1-ee9d81b6408f" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.672370 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" event={"ID":"e63abe9a-f86e-4dfa-9ddc-3a141ccff375","Type":"ContainerStarted","Data":"558fdf9f4301340e2b1a4c23989bd0c06f509cff3c0cc196e069c44fc2f8537a"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.680540 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" event={"ID":"c01a0586-dcf7-41cb-a48f-a593e4436d7a","Type":"ContainerStarted","Data":"5793eba7252b2d34df6f3a20f89fa21ec154d4599e1a62f5f0041ac0dabb1eff"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.683849 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" event={"ID":"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b","Type":"ContainerStarted","Data":"7923fb38b85f569d0b5b1b657004e688c6bdd3ff7ccfe0d8c15510b157ffb70c"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.683882 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" event={"ID":"8893bf59-e9ad-45f9-a1eb-bfd3ac808a0b","Type":"ContainerStarted","Data":"b0498017474e1774e88e8fe9649ed19aa87450ae4ea37abcaefc9b9591bfed8a"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.685648 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp" event={"ID":"30cc1270-b838-459f-a925-f971ed08b550","Type":"ContainerStarted","Data":"f9aff020dc820a76d80987b7daf96bdfd36b99f23a8d988f8eb2eade42859fb3"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.686330 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" event={"ID":"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f","Type":"ContainerStarted","Data":"fe9a39d08c78c4873061c56002eaddae6e55064612f00e6675e3a64b1ab0705e"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.686959 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" event={"ID":"23c99006-27db-445e-ae39-365ca63ae52e","Type":"ContainerStarted","Data":"cb912156601ccd1c4f6d592a93b13f550b725bf5935cbac1d972983351b61205"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.689826 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" event={"ID":"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a","Type":"ContainerStarted","Data":"c01c0e237c1398be2242199f53f3fe4dc4ef2cdb3ad11ca43e312b47e8a1dc0a"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.706860 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" 
event={"ID":"61a2958b-98ca-4304-956a-5995bdda407d","Type":"ContainerStarted","Data":"c01dd551acc55c24411c3387d9d339c7fa02a0dda75367ec6b766ab418f6bcd7"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.706931 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" event={"ID":"61a2958b-98ca-4304-956a-5995bdda407d","Type":"ContainerStarted","Data":"51bb814e4670741bea13105189b90f83ecf202bb1714010bbd186c0d5c0065b9"} Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.733644 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.736615 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" event={"ID":"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9","Type":"ContainerStarted","Data":"e48a610de5a064393dac157cc498e72f8bb0347201a9a73ce02a0d6e5c75a520"} Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.736743 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.23672322 +0000 UTC m=+137.218047184 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.750106 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fq82t" podStartSLOduration=116.750090689 podStartE2EDuration="1m56.750090689s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:44.716684755 +0000 UTC m=+136.698008659" watchObservedRunningTime="2025-09-29 09:31:44.750090689 +0000 UTC m=+136.731414593" Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.773256 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dkmtc"] Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.828211 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch"] Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.838602 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.838850 4779 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.338838709 +0000 UTC m=+137.320162613 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.930675 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" podStartSLOduration=116.930648474 podStartE2EDuration="1m56.930648474s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:44.926398493 +0000 UTC m=+136.907722397" watchObservedRunningTime="2025-09-29 09:31:44.930648474 +0000 UTC m=+136.911972378" Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.940780 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.943492 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.443470607 +0000 UTC m=+137.424794511 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:44 crc kubenswrapper[4779]: I0929 09:31:44.953379 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:44 crc kubenswrapper[4779]: E0929 09:31:44.953681 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.453669369 +0000 UTC m=+137.434993273 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.054646 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.055272 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.555253943 +0000 UTC m=+137.536577847 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.156762 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.157043 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.657030253 +0000 UTC m=+137.638354157 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.257609 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.257896 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.757882544 +0000 UTC m=+137.739206448 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.359963 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.360364 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.860351875 +0000 UTC m=+137.841675779 (durationBeforeRetry 500ms). 
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.385820 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.399169 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" podStartSLOduration=117.399157244 podStartE2EDuration="1m57.399157244s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:45.398827544 +0000 UTC m=+137.380151448" watchObservedRunningTime="2025-09-29 09:31:45.399157244 +0000 UTC m=+137.380481148"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.442159 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7zdlq" podStartSLOduration=118.44209442 podStartE2EDuration="1m58.44209442s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:45.441021157 +0000 UTC m=+137.422345061" watchObservedRunningTime="2025-09-29 09:31:45.44209442 +0000 UTC m=+137.423418324"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.468216 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.468606 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:45.968587512 +0000 UTC m=+137.949911416 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.488617 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" podStartSLOduration=118.488597985 podStartE2EDuration="1m58.488597985s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:45.479018042 +0000 UTC m=+137.460341966" watchObservedRunningTime="2025-09-29 09:31:45.488597985 +0000 UTC m=+137.469921889"
Sep 29 09:31:45 crc kubenswrapper[4779]: W0929 09:31:45.504600 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf4e3cb9_5686_44c9_ad62_1c4bd7e44d8a.slice/crio-149a10a06044e4cdcd985926b227c8cc4efc7b29ead619a434f37f59bd021456 WatchSource:0}: Error finding container 149a10a06044e4cdcd985926b227c8cc4efc7b29ead619a434f37f59bd021456: Status 404 returned error can't find the container with id 149a10a06044e4cdcd985926b227c8cc4efc7b29ead619a434f37f59bd021456
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.541445 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" podStartSLOduration=117.541421005 podStartE2EDuration="1m57.541421005s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:45.525325961 +0000 UTC m=+137.506649865" watchObservedRunningTime="2025-09-29 09:31:45.541421005 +0000 UTC m=+137.522744909"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.546137 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sfkpc"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.557118 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.564374 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-jx6p6"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.569859 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.570580 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.070566408 +0000 UTC m=+138.051890312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
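[Annotation] Each "Observed pod startup duration" record above reports podStartSLOduration as watchObservedRunningTime minus podCreationTimestamp; because firstStartedPulling/lastFinishedPulling are the zero time (no image pull was needed), podStartE2EDuration is the same interval rendered as a Go duration string. A worked example in Go using the route-controller-manager record's own values:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Values taken from the route-controller-manager record above.
	created, _ := time.Parse(time.RFC3339, "2025-09-29T09:29:48Z")
	observed, _ := time.Parse(time.RFC3339Nano, "2025-09-29T09:31:45.399157244Z")

	slo := observed.Sub(created)
	fmt.Println(slo.Seconds()) // 117.399157244, matching podStartSLOduration
	fmt.Println(slo)           // 1m57.399157244s, matching podStartE2EDuration
}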
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.574744 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" podStartSLOduration=117.574721945 podStartE2EDuration="1m57.574721945s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:45.559750556 +0000 UTC m=+137.541074460" watchObservedRunningTime="2025-09-29 09:31:45.574721945 +0000 UTC m=+137.556045859"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.584006 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.594050 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vkr5d"]
Sep 29 09:31:45 crc kubenswrapper[4779]: W0929 09:31:45.631934 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cc982ee_40a1_4ac8_a936_a626ee218633.slice/crio-3cc5893f2f48e7beac48a25fe54b6a2fc1c296c69f60726774653cbaf739d187 WatchSource:0}: Error finding container 3cc5893f2f48e7beac48a25fe54b6a2fc1c296c69f60726774653cbaf739d187: Status 404 returned error can't find the container with id 3cc5893f2f48e7beac48a25fe54b6a2fc1c296c69f60726774653cbaf739d187
Sep 29 09:31:45 crc kubenswrapper[4779]: W0929 09:31:45.634795 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6161b6da_ce56_4639_be44_28e54239bce0.slice/crio-6e8ce0d29128b76449e3dce26ef9afcc1249d1108624d8969ef68b98c83a8102 WatchSource:0}: Error finding container 6e8ce0d29128b76449e3dce26ef9afcc1249d1108624d8969ef68b98c83a8102: Status 404 returned error can't find the container with id 6e8ce0d29128b76449e3dce26ef9afcc1249d1108624d8969ef68b98c83a8102
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.642974 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.670826 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.671022 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.170999546 +0000 UTC m=+138.152323460 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.671348 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.671756 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.171747489 +0000 UTC m=+138.153071393 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.743388 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.763656 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xhrzk"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.764426 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h" event={"ID":"997b01e5-23d3-47fe-953f-ca0100bdb0b6","Type":"ContainerStarted","Data":"47097b01e7479cfdb79954a3158d10605d349e3bd61b874e99867468ca7ffc78"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.770055 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" event={"ID":"23c99006-27db-445e-ae39-365ca63ae52e","Type":"ContainerStarted","Data":"459654c170114c858fc21b6b51f74bf9210015627b810701f41b14f809f8aa38"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.770427 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.773409 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.773837 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.273822498 +0000 UTC m=+138.255146392 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
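[Annotation] The nestedpendingoperations records above park each failed mount/unmount until "No retries permitted until <timestamp>", i.e. the failure time plus a backoff; in this excerpt the backoff shown is always the initial 500ms step. A small Go sketch of that gating, under the assumption that a real implementation grows the backoff exponentially up to a cap (illustrative types, not the kubelet's actual nestedpendingoperations code):

package main

import (
	"fmt"
	"time"
)

// retryGate mimics the "No retries permitted until ..." bookkeeping:
// after a failure, the operation may not run again until now+backoff.
type retryGate struct {
	backoff   time.Duration
	notBefore time.Time
}

func (g *retryGate) fail(now time.Time) {
	if g.backoff == 0 {
		g.backoff = 500 * time.Millisecond // initial durationBeforeRetry in the log
	}
	g.notBefore = now.Add(g.backoff)
	// Assumed: a real implementation would also grow g.backoff here;
	// the excerpt only ever shows the initial 500ms step.
}

func (g *retryGate) mayRetry(now time.Time) bool { return !now.Before(g.notBefore) }

func main() {
	g := &retryGate{}
	now := time.Date(2025, 9, 29, 9, 31, 45, 55272000, time.UTC)
	g.fail(now)
	fmt.Println(g.mayRetry(now))                             // false
	fmt.Println(g.mayRetry(now.Add(500 * time.Millisecond))) // true
}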
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.273822498 +0000 UTC m=+138.255146392 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.777676 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" event={"ID":"e63abe9a-f86e-4dfa-9ddc-3a141ccff375","Type":"ContainerStarted","Data":"2f5dfae3eaba80194dda9b24d8a6243a28e15eab30ce5ed3005f36fe69c87fb2"} Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.778099 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.779478 4779 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-4xmkd container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.779535 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" podUID="e63abe9a-f86e-4dfa-9ddc-3a141ccff375" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.781241 4779 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-b2xpr container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.781284 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" podUID="23c99006-27db-445e-ae39-365ca63ae52e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.794116 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-blxvw"] Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.799329 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" podStartSLOduration=117.799313869 podStartE2EDuration="1m57.799313869s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:45.797874065 +0000 UTC m=+137.779197979" watchObservedRunningTime="2025-09-29 09:31:45.799313869 +0000 UTC 
m=+137.780637773" Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.800295 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" event={"ID":"23db3d43-22c2-4425-944c-201c682f382d","Type":"ContainerStarted","Data":"f59707b3f7cfc4a31fb4c0b8e15b0faa813fc3d11187cd494fdd048ac864cd29"} Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.839802 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" event={"ID":"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9","Type":"ContainerStarted","Data":"c8200eb8f57ad5a4bf65dc3f38b8871b91d473683c4661e276f53ce660ba7dda"} Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.840578 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" podStartSLOduration=117.840411779 podStartE2EDuration="1m57.840411779s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:45.837434888 +0000 UTC m=+137.818758782" watchObservedRunningTime="2025-09-29 09:31:45.840411779 +0000 UTC m=+137.821735683" Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.843955 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" event={"ID":"3d47c69e-9ca4-4bd5-a362-3c0051825a7e","Type":"ContainerStarted","Data":"e5d680bdb57ffc6900890ba2312cc92912df477b49bddf0891920422573e0c81"} Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.859420 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" event={"ID":"cb9c789a-4f39-46ee-8e61-5e39d65daf38","Type":"ContainerStarted","Data":"fd1c20a726a5ad6aafc038bde020c7bcf11cd8cb9bca1a6e277cfe6fcd095969"} Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.867156 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc" event={"ID":"2e7822b0-05f5-44bc-aeb2-2fa46992e016","Type":"ContainerStarted","Data":"ed1d85d174cb7d0e1c62c014ba628a2617dfda9dd75088e1578a63b38f356979"} Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.875048 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.876475 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.376462864 +0000 UTC m=+138.357786768 (durationBeforeRetry 500ms). 
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.896929 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vkr5d" event={"ID":"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6","Type":"ContainerStarted","Data":"e0285d7a01a49d2575a19f5d4f7129226f4cda5675001534a3c11d0b95f846b2"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.900114 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jx6p6" event={"ID":"5cc982ee-40a1-4ac8-a936-a626ee218633","Type":"ContainerStarted","Data":"3cc5893f2f48e7beac48a25fe54b6a2fc1c296c69f60726774653cbaf739d187"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.918981 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-b8g42"]
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.928371 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" event={"ID":"2bdac139-9c3d-4490-b350-b0513395a281","Type":"ContainerStarted","Data":"ac6522f4e033ed80136ea47145ff54d5c4940e1138c595a0c794327f1c18d80f"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.934649 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" event={"ID":"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f","Type":"ContainerStarted","Data":"aa5a52748392f7f13e3892cbbddcc8087c7cecaf1a0493fe5187be4a4a038422"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.937828 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9" event={"ID":"b9929052-c8a6-48a6-9520-9b8f4dc396e0","Type":"ContainerStarted","Data":"1fb19bdaf46828f6d990cb68e9b4d50a3eec947b7360d8cf267e4332f7d3426b"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.939341 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" event={"ID":"4a3ce3bd-7194-4b10-9788-2b570b1814c0","Type":"ContainerStarted","Data":"299933c1c308235c94b97be82ae9c12689b0552ed6803389985de52dbb1dce2d"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.950641 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8gbfr" event={"ID":"b61fd91c-c774-44fd-9d5e-114aa59a1b39","Type":"ContainerStarted","Data":"7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.952576 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" event={"ID":"4872deec-335d-4079-8981-1db7ef98b710","Type":"ContainerStarted","Data":"f4557cc7b599bf10e5d7c307c0dd672118bc3e7842bd6e3805105a284333ed85"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.955851 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-m9sp2" event={"ID":"e537e250-466e-4019-a6bf-57b8301b954e","Type":"ContainerStarted","Data":"595beece421527ff4d97b88cf24d6405614fa8b5416b03e879676dedc85309ea"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.956366 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-m9sp2"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.957361 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp" event={"ID":"6161b6da-ce56-4639-be44-28e54239bce0","Type":"ContainerStarted","Data":"6e8ce0d29128b76449e3dce26ef9afcc1249d1108624d8969ef68b98c83a8102"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.959579 4779 patch_prober.go:28] interesting pod/console-operator-58897d9998-m9sp2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.959785 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-m9sp2" podUID="e537e250-466e-4019-a6bf-57b8301b954e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.978109 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-8gbfr" podStartSLOduration=117.978091659 podStartE2EDuration="1m57.978091659s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:45.976641305 +0000 UTC m=+137.957965219" watchObservedRunningTime="2025-09-29 09:31:45.978091659 +0000 UTC m=+137.959415563"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.983961 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:45 crc kubenswrapper[4779]: E0929 09:31:45.984328 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.48430643 +0000 UTC m=+138.465630334 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.995886 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qpmlh" event={"ID":"d48d8d73-7021-4ebd-8321-b7f73e330c1c","Type":"ContainerStarted","Data":"d09c189134fdef5bb31515a8225b205b4cc383524a57421e15b5d638e8c96a52"}
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.996254 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-qpmlh"
Sep 29 09:31:45 crc kubenswrapper[4779]: I0929 09:31:45.999014 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" event={"ID":"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a","Type":"ContainerStarted","Data":"149a10a06044e4cdcd985926b227c8cc4efc7b29ead619a434f37f59bd021456"}
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.002611 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-qpmlh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.002675 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qpmlh" podUID="d48d8d73-7021-4ebd-8321-b7f73e330c1c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.008806 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cscv9" podStartSLOduration=118.00877919 podStartE2EDuration="1m58.00877919s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:46.006225371 +0000 UTC m=+137.987549275" watchObservedRunningTime="2025-09-29 09:31:46.00877919 +0000 UTC m=+137.990103094"
Sep 29 09:31:46 crc kubenswrapper[4779]: W0929 09:31:46.013826 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce544002_58ff_46a0_a565_291a6dd31673.slice/crio-4a856273f04120756e181defc3301ad8da27b647bb6ea6e18da32c21b7fe7100 WatchSource:0}: Error finding container 4a856273f04120756e181defc3301ad8da27b647bb6ea6e18da32c21b7fe7100: Status 404 returned error can't find the container with id 4a856273f04120756e181defc3301ad8da27b647bb6ea6e18da32c21b7fe7100
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.015501 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-f4jjc" event={"ID":"8a39cfe4-4366-4e3c-81a1-5a0694840afb","Type":"ContainerStarted","Data":"e711cccda9bf8c565ae5d54372d7a10119ee4af987cad20bef661970761d85c6"}
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.029373 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-m9sp2" podStartSLOduration=118.02935323 podStartE2EDuration="1m58.02935323s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:46.028407461 +0000 UTC m=+138.009731375" watchObservedRunningTime="2025-09-29 09:31:46.02935323 +0000 UTC m=+138.010677134"
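[Annotation] The "manager.go:1169] Failed to process watch event ... Status 404" warnings above are a benign startup race: the cgroup for a new container appears (and triggers a watch event) before the runtime can report the container, so the lookup returns not-found and the container is picked up by a later housekeeping pass. A hypothetical Go sketch of tolerating that race (illustrative names, not cadvisor's actual code):

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the runtime's 404 "can't find the container" answer.
var errNotFound = errors.New("status 404: container not found")

func lookupContainer(id string) error {
	// Assumed: the runtime has not materialized this container yet.
	return errNotFound
}

// onCgroupEvent mimics handling a watch event for a brand-new cgroup:
// a not-found answer is logged as a warning and skipped, since a later
// housekeeping pass will discover the container once it exists.
func onCgroupEvent(id string) {
	if err := lookupContainer(id); errors.Is(err, errNotFound) {
		fmt.Printf("W: failed to process watch event for %s: %v (picked up later)\n", id[:12], err)
		return
	}
	fmt.Println("container added:", id[:12])
}

func main() {
	onCgroupEvent("4a856273f04120756e181defc3301ad8da27b647bb6ea6e18da32c21b7fe7100")
}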
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.032023 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-sp4jc" event={"ID":"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903","Type":"ContainerStarted","Data":"7a78a44bee9395d6badb76cb6535755fb651d8eb7af25525617e5e221e919b3d"}
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.050006 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-dnrj5" event={"ID":"c01a0586-dcf7-41cb-a48f-a593e4436d7a","Type":"ContainerStarted","Data":"7c9aca4813a43a7914074e7a8ca869b2123fb66f1e854bcd63e3bfe279a2f77e"}
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.054696 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-f4jjc" podStartSLOduration=118.054683717 podStartE2EDuration="1m58.054683717s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:46.054154201 +0000 UTC m=+138.035478105" watchObservedRunningTime="2025-09-29 09:31:46.054683717 +0000 UTC m=+138.036007621"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.057279 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" event={"ID":"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a","Type":"ContainerStarted","Data":"206b4708309bd54b1c129d3977d5a0d4eaca3127ed1b94b72854ecf14012ad03"}
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.080049 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7" event={"ID":"1c0ae796-2abb-42c7-bbef-9ddbd496420a","Type":"ContainerStarted","Data":"a267d0e84676508913530d0c3d0b48b03582f95f79ee3bdd4c42e106858dfa7f"}
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.082460 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" event={"ID":"67e6a9c5-a695-4683-9df7-cab5bace357c","Type":"ContainerStarted","Data":"f6c9ea55a78028a68432e9cf4b70c605eda3e22ba05eb74122488971d79cce22"}
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.083210 4779 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-dvvh4 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body=
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.083251 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" podUID="2118e02f-c7d4-4def-a9f1-ee9d81b6408f" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.089149 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.091342 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.59132691 +0000 UTC m=+138.572650814 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.093062 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-qpmlh" podStartSLOduration=118.093047153 podStartE2EDuration="1m58.093047153s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:46.092990351 +0000 UTC m=+138.074314255" watchObservedRunningTime="2025-09-29 09:31:46.093047153 +0000 UTC m=+138.074371057"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.095966 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.140679 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-sp4jc" podStartSLOduration=6.140660102 podStartE2EDuration="6.140660102s" podCreationTimestamp="2025-09-29 09:31:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:46.117652027 +0000 UTC m=+138.098975951" watchObservedRunningTime="2025-09-29 09:31:46.140660102 +0000 UTC m=+138.121984006"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.177793 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7" podStartSLOduration=118.17776969 podStartE2EDuration="1m58.17776969s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:46.14515249 +0000 UTC m=+138.126476404" watchObservedRunningTime="2025-09-29 09:31:46.17776969 +0000 UTC m=+138.159093594"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.190103 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.191714 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.691690926 +0000 UTC m=+138.673014840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.208994 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-v6825" podStartSLOduration=119.208978776 podStartE2EDuration="1m59.208978776s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:46.177004386 +0000 UTC m=+138.158328290" watchObservedRunningTime="2025-09-29 09:31:46.208978776 +0000 UTC m=+138.190302680"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.295318 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.295941 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.795922651 +0000 UTC m=+138.777246555 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.399490 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.399714 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.899691292 +0000 UTC m=+138.881015196 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.399801 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.400211 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:46.900195917 +0000 UTC m=+138.881519821 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.500846 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.501233 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.001205533 +0000 UTC m=+138.982529437 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.501536 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.501782 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.001771061 +0000 UTC m=+138.983094965 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.532083 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-f4jjc"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.534397 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.534444 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.602015 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.602436 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.102421336 +0000 UTC m=+139.083745240 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.704098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.704477 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.204459903 +0000 UTC m=+139.185783807 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.814429 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.815730 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.315713193 +0000 UTC m=+139.297037097 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.866680 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-zhwns"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.867205 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-zhwns"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.880318 4779 patch_prober.go:28] interesting pod/apiserver-76f77b778f-zhwns container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]log ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]etcd ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/start-apiserver-admission-initializer ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/generic-apiserver-start-informers ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/max-in-flight-filter ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/storage-object-count-tracker-hook ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/project.openshift.io-projectcache ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/openshift.io-startinformers ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/openshift.io-restmapperupdater ok
Sep 29 09:31:46 crc kubenswrapper[4779]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Sep 29 09:31:46 crc kubenswrapper[4779]: livez check failed
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.880399 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" podUID="f420d1df-5091-46b0-be80-9a83e5be1a65" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 09:31:46 crc kubenswrapper[4779]: I0929 09:31:46.916411 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:46 crc kubenswrapper[4779]: E0929 09:31:46.916808 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.416787821 +0000 UTC m=+139.398111725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.018265 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.018438 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.518414616 +0000 UTC m=+139.499738520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
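[Annotation] The [+]/[-] block in the startup probe output above is the aggregated health-check format used by Kubernetes-style apiservers: each named check is listed as [+]name ok or [-]name failed, and a single failing check (here poststarthook/authorization.openshift.io-bootstrapclusterroles, still initializing) makes the whole endpoint answer 500 until it clears. A Go sketch of a handler producing that shape (illustrative; the real mux lives in k8s.io/apiserver):

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"strings"
)

// healthzHandler lists every check with [+]/[-]; any failure turns the
// response into a 500, which is what the kubelet's probe then reports.
func healthzHandler(checks map[string]error) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var b strings.Builder
		failed := false
		for name, err := range checks {
			if err != nil {
				failed = true
				fmt.Fprintf(&b, "[-]%s failed: reason withheld\n", name)
			} else {
				fmt.Fprintf(&b, "[+]%s ok\n", name)
			}
		}
		if failed {
			b.WriteString("livez check failed\n")
			w.WriteHeader(http.StatusInternalServerError) // probe sees "statuscode: 500"
		}
		io.WriteString(w, b.String())
	}
}

func main() {
	h := healthzHandler(map[string]error{
		"ping": nil,
		"poststarthook/authorization.openshift.io-bootstrapclusterroles": fmt.Errorf("not finished"),
	})
	srv := httptest.NewServer(h)
	defer srv.Close()
	resp, _ := http.Get(srv.URL)
	fmt.Println(resp.Status) // 500 Internal Server Error
}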
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.018789 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.020537 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.520529091 +0000 UTC m=+139.501852995 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.088167 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc" event={"ID":"2e7822b0-05f5-44bc-aeb2-2fa46992e016","Type":"ContainerStarted","Data":"9d9256dec67e72968d70f04c7ed071abcdb5585b368eb626bcae785b110db453"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.098822 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" event={"ID":"580e28be-3dc6-4a6d-ab7f-3d89bae5ed8f","Type":"ContainerStarted","Data":"42b9d0f5359f1951613738c7b38486ec94e88cc85927de1bc5f4593cab491bf8"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.100888 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9" event={"ID":"b9929052-c8a6-48a6-9520-9b8f4dc396e0","Type":"ContainerStarted","Data":"268abeb062536705d445877474e4d57c40adc6d6a4444e5eba09e64e1a71dbe4"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.102836 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" event={"ID":"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d","Type":"ContainerStarted","Data":"73b98a46386d9723b0f19c6b761bac75c7313cf4fbae6ab2a6d855c16a45a1c5"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.102972 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" event={"ID":"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d","Type":"ContainerStarted","Data":"c75d25670311e415bcffa080d5c415565eb50ebd831aad77ad15849e17bd8860"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.104315 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp" event={"ID":"c4bc15d8-8992-4e59-b6aa-633433fdce22","Type":"ContainerStarted","Data":"ef8a61123230da1c193538c2c966bbf41d28c5ff1dd074225b66f50abc0367c0"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.104425 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp" event={"ID":"c4bc15d8-8992-4e59-b6aa-633433fdce22","Type":"ContainerStarted","Data":"8e296c28d67dca888fd5488c999517b5f0f3c21bceeafa3526d749b6fede25ff"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.106582 4779 generic.go:334] "Generic (PLEG): container finished" podID="cd78a50e-e3f8-4518-9b93-d8a8d8a08df9" containerID="c8200eb8f57ad5a4bf65dc3f38b8871b91d473683c4661e276f53ce660ba7dda" exitCode=0
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.106643 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" event={"ID":"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9","Type":"ContainerDied","Data":"c8200eb8f57ad5a4bf65dc3f38b8871b91d473683c4661e276f53ce660ba7dda"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.107469 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6"
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.108769 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6"
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.111287 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" event={"ID":"2bdac139-9c3d-4490-b350-b0513395a281","Type":"ContainerStarted","Data":"72c04c400f768146cdf49939527a0488210a1769040848164f406780ce6b95d1"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.112840 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jx6p6" event={"ID":"5cc982ee-40a1-4ac8-a936-a626ee218633","Type":"ContainerStarted","Data":"fb8d7ba50a6f0331c6ae0f5d04a029ff06a14efc7c42224825a069d100112c73"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.112947 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jx6p6" event={"ID":"5cc982ee-40a1-4ac8-a936-a626ee218633","Type":"ContainerStarted","Data":"2134598c96ae3cf6d331e74b27fba36286747c9fd02a090a97b2f845f3bd689c"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.113344 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-jx6p6"
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.114552 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xhrzk" event={"ID":"4d97870e-816a-415f-a3ad-1ccdf24e035f","Type":"ContainerStarted","Data":"cc435d8542d61a683f466fd71a036fa8a91a570556e34f0c739f2488d3e1c1d8"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.114657 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xhrzk" event={"ID":"4d97870e-816a-415f-a3ad-1ccdf24e035f","Type":"ContainerStarted","Data":"6a6e52359af999a40960b0c07b3069da87ec10acc4294ed8ad80e470bfa09eb0"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.116135 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" event={"ID":"bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a","Type":"ContainerStarted","Data":"d356f5040cb017ade5fdc8aa9b1b2d75213727273c37349f466e52390f5a4193"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.116735 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm"
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.117930 4779 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-jngcm container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.118118 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" podUID="bf4e3cb9-5686-44c9-ad62-1c4bd7e44d8a" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.118458 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-sp4jc" event={"ID":"6d3d9d05-b7a1-45ef-86a5-edc7d2ba3903","Type":"ContainerStarted","Data":"837e7b71b94bcffd47e0ba989183c8d6a2f5ac329b83c6b09ba28dbba30a7bec"}
Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.119782 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.120500 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.620483385 +0000 UTC m=+139.601807289 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.123476 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch" event={"ID":"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933","Type":"ContainerStarted","Data":"3ca0a78b4db6e95b893c7a857c5845f12877d5e7e616d97e75d653fae2113fbe"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.123516 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch" event={"ID":"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933","Type":"ContainerStarted","Data":"2000fe13a2f02a55c42b1539aec47a10b397680fdc5de8960a9dfb62ef6a634e"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.123526 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch" event={"ID":"b59a8e7c-0cc5-4ea8-ac5f-2d80959f8933","Type":"ContainerStarted","Data":"64b1f7c22b1c454a242e493d89ce1e52bde4c5ca329d4fc485a54f3adb0a47fb"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.123980 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.125526 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h" event={"ID":"997b01e5-23d3-47fe-953f-ca0100bdb0b6","Type":"ContainerStarted","Data":"e800132620bb4a1351a4da046e357ba17109a10b1b6fe010932fe4c1ca8ba1d6"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.128194 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" event={"ID":"b2287b55-49f7-492d-a256-d300d4fee9c8","Type":"ContainerStarted","Data":"df5c577d2f89a7fc1bb139eab2218161aa4fc066363ad617dbfe19ddd355d408"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.131152 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp" event={"ID":"6161b6da-ce56-4639-be44-28e54239bce0","Type":"ContainerStarted","Data":"6ff002e57e59e4a1667550b97029608854c20e6bd3b6de6938fc2bc4b0b87cab"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.133464 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp" event={"ID":"30cc1270-b838-459f-a925-f971ed08b550","Type":"ContainerStarted","Data":"36991ab552738d91a2982808daccf4daa88c9b66ee1301aa41ce159679633145"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.135231 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" event={"ID":"3d47c69e-9ca4-4bd5-a362-3c0051825a7e","Type":"ContainerStarted","Data":"7c57465ed104f29c9f2d2c18e9a41a8523daed9f3524bf295d84e1e115e3a177"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 
09:31:47.135368 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" event={"ID":"3d47c69e-9ca4-4bd5-a362-3c0051825a7e","Type":"ContainerStarted","Data":"23b36dd053c88da0dd44f49c70ddc707b2aa2770306cefeaf30666f078e5b989"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.138462 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" event={"ID":"cb9c789a-4f39-46ee-8e61-5e39d65daf38","Type":"ContainerStarted","Data":"cd8dca10d6a88af03c12fe6a564136dce54a84bd63370b28324793527bf20679"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.140587 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.141543 4779 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-dkmtc container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.141796 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" podUID="cb9c789a-4f39-46ee-8e61-5e39d65daf38" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.151166 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-djlds" podStartSLOduration=119.151152565 podStartE2EDuration="1m59.151152565s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.1500151 +0000 UTC m=+139.131339024" watchObservedRunningTime="2025-09-29 09:31:47.151152565 +0000 UTC m=+139.132476469" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.174550 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" event={"ID":"a5e4b9fb-8a86-4ab6-8d80-7007127d1b1a","Type":"ContainerStarted","Data":"8db3dffdb50c62dbd5d38f7138b722aeeef54cf88ff529cc79d3955de40dfcae"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.184323 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbsrp" podStartSLOduration=119.184308271 podStartE2EDuration="1m59.184308271s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.177053289 +0000 UTC m=+139.158377193" watchObservedRunningTime="2025-09-29 09:31:47.184308271 +0000 UTC m=+139.165632165" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.199314 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-fz89h" podStartSLOduration=119.199299601 podStartE2EDuration="1m59.199299601s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.197614269 +0000 UTC m=+139.178938183" watchObservedRunningTime="2025-09-29 09:31:47.199299601 +0000 UTC m=+139.180623505" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.202723 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" event={"ID":"61a2958b-98ca-4304-956a-5995bdda407d","Type":"ContainerStarted","Data":"b5ca8f0df34298890eb1e84612c2c35b4a466e48c071497389d23570a3811b70"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.217676 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42" event={"ID":"ce544002-58ff-46a0-a565-291a6dd31673","Type":"ContainerStarted","Data":"1a176c9d80017785eb5455f25c7b9954cfdbc45a8166d3d5da385dcadba6e2d4"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.217739 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42" event={"ID":"ce544002-58ff-46a0-a565-291a6dd31673","Type":"ContainerStarted","Data":"4a856273f04120756e181defc3301ad8da27b647bb6ea6e18da32c21b7fe7100"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.220564 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-blxvw" event={"ID":"dc8659b1-2788-498e-bc4c-c294328dde71","Type":"ContainerStarted","Data":"fe16c43fa9471a256734189d8b26acb64a52130dbd691b90312cd6e91588a67a"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.220592 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-blxvw" event={"ID":"dc8659b1-2788-498e-bc4c-c294328dde71","Type":"ContainerStarted","Data":"44180e74410247dd2f662c224d99a3f08b64092d4b8c0728e893caec48ad08f3"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.221481 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.222221 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.722205943 +0000 UTC m=+139.703529847 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.232640 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mvxx7" event={"ID":"1c0ae796-2abb-42c7-bbef-9ddbd496420a","Type":"ContainerStarted","Data":"9768b8ecb8a02e8580d83bede9d32d2a2cca0b4c24f6a869cc7366b90b385d79"} Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.241171 4779 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-4xmkd container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.241438 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" podUID="e63abe9a-f86e-4dfa-9ddc-3a141ccff375" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.241494 4779 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-b2xpr container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.241506 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" podUID="23c99006-27db-445e-ae39-365ca63ae52e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.241539 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-qpmlh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.241551 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qpmlh" podUID="d48d8d73-7021-4ebd-8321-b7f73e330c1c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.241788 4779 patch_prober.go:28] interesting pod/console-operator-58897d9998-m9sp2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.241805 4779 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console-operator/console-operator-58897d9998-m9sp2" podUID="e537e250-466e-4019-a6bf-57b8301b954e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.269056 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-67pmp" podStartSLOduration=119.269035468 podStartE2EDuration="1m59.269035468s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.262334143 +0000 UTC m=+139.243658047" watchObservedRunningTime="2025-09-29 09:31:47.269035468 +0000 UTC m=+139.250359372" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.282195 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2tdjp" podStartSLOduration=119.282176361 podStartE2EDuration="1m59.282176361s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.28147924 +0000 UTC m=+139.262803164" watchObservedRunningTime="2025-09-29 09:31:47.282176361 +0000 UTC m=+139.263500265" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.301763 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvmch" podStartSLOduration=119.301746461 podStartE2EDuration="1m59.301746461s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.300107781 +0000 UTC m=+139.281431695" watchObservedRunningTime="2025-09-29 09:31:47.301746461 +0000 UTC m=+139.283070365" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.322968 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch" podStartSLOduration=119.322952991 podStartE2EDuration="1m59.322952991s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.321425874 +0000 UTC m=+139.302749778" watchObservedRunningTime="2025-09-29 09:31:47.322952991 +0000 UTC m=+139.304276895" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.324481 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.325797 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:47.825782608 +0000 UTC m=+139.807106512 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.364129 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bp6p9" podStartSLOduration=119.364091212 podStartE2EDuration="1m59.364091212s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.345468661 +0000 UTC m=+139.326792565" watchObservedRunningTime="2025-09-29 09:31:47.364091212 +0000 UTC m=+139.345415116" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.366068 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" podStartSLOduration=119.366060312 podStartE2EDuration="1m59.366060312s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.363312808 +0000 UTC m=+139.344636722" watchObservedRunningTime="2025-09-29 09:31:47.366060312 +0000 UTC m=+139.347384216" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.385675 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-jx6p6" podStartSLOduration=7.385659803 podStartE2EDuration="7.385659803s" podCreationTimestamp="2025-09-29 09:31:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.384489317 +0000 UTC m=+139.365813221" watchObservedRunningTime="2025-09-29 09:31:47.385659803 +0000 UTC m=+139.366983697" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.425962 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xqgjv" podStartSLOduration=119.425943468 podStartE2EDuration="1m59.425943468s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.424753621 +0000 UTC m=+139.406077525" watchObservedRunningTime="2025-09-29 09:31:47.425943468 +0000 UTC m=+139.407267372" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.427716 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.436860 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-29 09:31:47.936845682 +0000 UTC m=+139.918169586 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.453045 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.461015 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" podStartSLOduration=107.460994602 podStartE2EDuration="1m47.460994602s" podCreationTimestamp="2025-09-29 09:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.45668417 +0000 UTC m=+139.438008074" watchObservedRunningTime="2025-09-29 09:31:47.460994602 +0000 UTC m=+139.442318506" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.488290 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-tcl8h" podStartSLOduration=119.488275438 podStartE2EDuration="1m59.488275438s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.486636078 +0000 UTC m=+139.467959982" watchObservedRunningTime="2025-09-29 09:31:47.488275438 +0000 UTC m=+139.469599342" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.507209 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-xhrzk" podStartSLOduration=7.507194068 podStartE2EDuration="7.507194068s" podCreationTimestamp="2025-09-29 09:31:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.506556889 +0000 UTC m=+139.487880803" watchObservedRunningTime="2025-09-29 09:31:47.507194068 +0000 UTC m=+139.488517972" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.536278 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:47 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:47 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:47 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.536335 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.537263 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" podStartSLOduration=119.537248399 podStartE2EDuration="1m59.537248399s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.535770624 +0000 UTC m=+139.517094528" watchObservedRunningTime="2025-09-29 09:31:47.537248399 +0000 UTC m=+139.518572303" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.538702 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.539039 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.039023544 +0000 UTC m=+140.020347448 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.564741 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfn5c" podStartSLOduration=119.564722211 podStartE2EDuration="1m59.564722211s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.562038359 +0000 UTC m=+139.543362263" watchObservedRunningTime="2025-09-29 09:31:47.564722211 +0000 UTC m=+139.546046115" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.622892 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-blxvw" podStartSLOduration=119.622871744 podStartE2EDuration="1m59.622871744s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.594187504 +0000 UTC m=+139.575511409" watchObservedRunningTime="2025-09-29 09:31:47.622871744 +0000 UTC m=+139.604195648" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.638036 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-zfpqh" podStartSLOduration=119.638019278 podStartE2EDuration="1m59.638019278s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.636921444 +0000 UTC m=+139.618245358" watchObservedRunningTime="2025-09-29 09:31:47.638019278 +0000 UTC m=+139.619343182" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.639816 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.640143 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.140103952 +0000 UTC m=+140.121427846 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.673759 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-4sjxm" podStartSLOduration=120.673743973 podStartE2EDuration="2m0.673743973s" podCreationTimestamp="2025-09-29 09:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.672451563 +0000 UTC m=+139.653775467" watchObservedRunningTime="2025-09-29 09:31:47.673743973 +0000 UTC m=+139.655067877" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.695649 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-b8g42" podStartSLOduration=119.695634274 podStartE2EDuration="1m59.695634274s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:47.695147789 +0000 UTC m=+139.676471683" watchObservedRunningTime="2025-09-29 09:31:47.695634274 +0000 UTC m=+139.676958178" Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.741326 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.741728 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.241713556 +0000 UTC m=+140.223037460 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.843254 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.843757 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.343739223 +0000 UTC m=+140.325063127 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:47 crc kubenswrapper[4779]: I0929 09:31:47.945047 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:47 crc kubenswrapper[4779]: E0929 09:31:47.945520 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.445496232 +0000 UTC m=+140.426820146 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.047382 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.047872 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.547850509 +0000 UTC m=+140.529174413 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.149342 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.149768 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.649742672 +0000 UTC m=+140.631066576 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.248977 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vkr5d" event={"ID":"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6","Type":"ContainerStarted","Data":"640c1574efd1e838d5798bc68e30cc4d2680ee04138d109d14a512b1dcd2d4f0"} Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.251531 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.251970 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.751953425 +0000 UTC m=+140.733277329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.255377 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" event={"ID":"cd78a50e-e3f8-4518-9b93-d8a8d8a08df9","Type":"ContainerStarted","Data":"c21f6a35add9c414373461760dafb5e1ba04c77e1ff2aa0eb97f2d044abfad6c"} Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.255654 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.259267 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc" event={"ID":"2e7822b0-05f5-44bc-aeb2-2fa46992e016","Type":"ContainerStarted","Data":"da3383436f8bb260970ac31e2e323b44208aac3d2a03a892fbb13102afbd0a6f"} Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.262727 4779 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-dkmtc container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.262776 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" podUID="cb9c789a-4f39-46ee-8e61-5e39d65daf38" containerName="marketplace-operator" 
probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.273272 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44nv6" Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.284460 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-jngcm" Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.313837 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" podStartSLOduration=120.313823721 podStartE2EDuration="2m0.313823721s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:48.284256815 +0000 UTC m=+140.265580719" watchObservedRunningTime="2025-09-29 09:31:48.313823721 +0000 UTC m=+140.295147625" Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.353339 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.353523 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.853494217 +0000 UTC m=+140.834818121 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.353678 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.353978 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.853970382 +0000 UTC m=+140.835294286 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.439662 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-sfkpc" podStartSLOduration=120.439642268 podStartE2EDuration="2m0.439642268s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:48.374639435 +0000 UTC m=+140.355963339" watchObservedRunningTime="2025-09-29 09:31:48.439642268 +0000 UTC m=+140.420966172" Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.454805 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.456388 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:48.95634577 +0000 UTC m=+140.937669674 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.535988 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:48 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:48 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:48 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.536402 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.559788 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.559943 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.059930805 +0000 UTC m=+141.041254709 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.660737 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.661134 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.161118936 +0000 UTC m=+141.142442840 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.768851 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.769308 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.269287842 +0000 UTC m=+141.250611746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.870280 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.870705 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.370673449 +0000 UTC m=+141.351997353 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.870804 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.871275 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.371263768 +0000 UTC m=+141.352587882 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:48 crc kubenswrapper[4779]: I0929 09:31:48.972291 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:48 crc kubenswrapper[4779]: E0929 09:31:48.972702 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.472686986 +0000 UTC m=+141.454010890 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.074136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.074528 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.574499017 +0000 UTC m=+141.555822921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.175411 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.175802 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.675774151 +0000 UTC m=+141.657098055 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.276266 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.276776 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.776752246 +0000 UTC m=+141.758076330 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.277591 4779 generic.go:334] "Generic (PLEG): container finished" podID="bbb3233c-65ea-4bb4-aaa5-1d80fef4638d" containerID="73b98a46386d9723b0f19c6b761bac75c7313cf4fbae6ab2a6d855c16a45a1c5" exitCode=0 Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.277692 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" event={"ID":"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d","Type":"ContainerDied","Data":"73b98a46386d9723b0f19c6b761bac75c7313cf4fbae6ab2a6d855c16a45a1c5"} Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.282739 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vkr5d" event={"ID":"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6","Type":"ContainerStarted","Data":"c04279b06867abcf30fc2558c711d5defa16c677264fc7de1936b3eb890040f5"} Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.282804 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vkr5d" event={"ID":"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6","Type":"ContainerStarted","Data":"4bc9b9a065f6c00bc573c4b49815fdfffa1a8945967b383c63156b127f0dabb3"} Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.290559 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.378613 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.378802 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.878773433 +0000 UTC m=+141.860097337 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.379311 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.380356 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.880343002 +0000 UTC m=+141.861666906 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.479867 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.480086 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.980048128 +0000 UTC m=+141.961372032 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.480240 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.480664 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:49.980647736 +0000 UTC m=+141.961971640 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.547762 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:49 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:49 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:49 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.548259 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.550538 4779 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.555247 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z68v6"] Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.557031 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.560425 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.580826 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z68v6"] Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.581298 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.581580 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p7w2\" (UniqueName: \"kubernetes.io/projected/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-kube-api-access-8p7w2\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.581636 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-utilities\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.581742 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-catalog-content\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.581844 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:50.081828367 +0000 UTC m=+142.063152271 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.683064 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-utilities\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.683167 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.683220 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-catalog-content\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.683244 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p7w2\" (UniqueName: \"kubernetes.io/projected/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-kube-api-access-8p7w2\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.683561 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:50.183541315 +0000 UTC m=+142.164865209 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.683722 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-utilities\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.683753 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-catalog-content\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.703112 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p7w2\" (UniqueName: \"kubernetes.io/projected/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-kube-api-access-8p7w2\") pod \"certified-operators-z68v6\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.749188 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rmf6p"] Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.750044 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.751757 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.766125 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rmf6p"] Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.784314 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:50.284288923 +0000 UTC m=+142.265612827 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.784359 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.784581 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p6jj\" (UniqueName: \"kubernetes.io/projected/7db7e11b-22e3-45bf-a365-0a3583e0d52d-kube-api-access-2p6jj\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.784697 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.785185 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:50.28517676 +0000 UTC m=+142.266500664 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.785408 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-catalog-content\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.785527 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-utilities\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.873954 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.886887 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.887138 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 09:31:50.387096774 +0000 UTC m=+142.368420678 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.898225 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p6jj\" (UniqueName: \"kubernetes.io/projected/7db7e11b-22e3-45bf-a365-0a3583e0d52d-kube-api-access-2p6jj\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.898316 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.898379 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-catalog-content\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.898505 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-utilities\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: E0929 09:31:49.898793 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 09:31:50.398773112 +0000 UTC m=+142.380097016 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rzc4j" (UID: "5f395791-fcf5-4602-903b-06c24127b40e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.899248 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-utilities\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.899417 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-catalog-content\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.917998 4779 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-29T09:31:49.550572119Z","Handler":null,"Name":""} Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.919236 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p6jj\" (UniqueName: \"kubernetes.io/projected/7db7e11b-22e3-45bf-a365-0a3583e0d52d-kube-api-access-2p6jj\") pod \"community-operators-rmf6p\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.921515 4779 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.921770 4779 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.950216 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kxkqv"] Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.951510 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:49 crc kubenswrapper[4779]: I0929 09:31:49.966195 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kxkqv"] Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.007198 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.007375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-utilities\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.007451 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9xsj\" (UniqueName: \"kubernetes.io/projected/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-kube-api-access-m9xsj\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.007517 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-catalog-content\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.017511 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.067096 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.111584 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-catalog-content\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.111639 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-utilities\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.111704 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.111732 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9xsj\" (UniqueName: \"kubernetes.io/projected/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-kube-api-access-m9xsj\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.113525 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-utilities\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.113601 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-catalog-content\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.118795 4779 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.118843 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.119317 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z68v6"] Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.133447 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9xsj\" (UniqueName: \"kubernetes.io/projected/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-kube-api-access-m9xsj\") pod \"certified-operators-kxkqv\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") " pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.147649 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7dw6m"] Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.149640 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.160936 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7dw6m"] Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.177833 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rzc4j\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.182190 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.214197 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-utilities\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.214291 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xcmd\" (UniqueName: \"kubernetes.io/projected/06a364e4-0ea6-4fe1-bc83-7c98333e2163-kube-api-access-6xcmd\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.214315 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-catalog-content\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.277308 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kxkqv" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.301604 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rmf6p"] Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.315684 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xcmd\" (UniqueName: \"kubernetes.io/projected/06a364e4-0ea6-4fe1-bc83-7c98333e2163-kube-api-access-6xcmd\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.315733 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-catalog-content\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.316053 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z68v6" event={"ID":"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a","Type":"ContainerStarted","Data":"057d182fb75de135503a3cd812d8215c9cfd3568a159737254e7b424b4d83301"} Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.316095 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z68v6" event={"ID":"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a","Type":"ContainerStarted","Data":"411d96155f06c5750fe57d49c007396a0a9fef4975583b0f7a994cba0d8523ab"} Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.316551 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-catalog-content\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 
09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.316612 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-utilities\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.316897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-utilities\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.323429 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vkr5d" event={"ID":"c167fa4c-07a4-4a57-8fc0-a56b59c54ee6","Type":"ContainerStarted","Data":"866324e6d2060a8777766641db3e6e9739250f040cc85d94660302671cbd5893"} Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.333674 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.342311 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xcmd\" (UniqueName: \"kubernetes.io/projected/06a364e4-0ea6-4fe1-bc83-7c98333e2163-kube-api-access-6xcmd\") pod \"community-operators-7dw6m\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") " pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.359527 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-vkr5d" podStartSLOduration=10.359507894 podStartE2EDuration="10.359507894s" podCreationTimestamp="2025-09-29 09:31:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:50.354751348 +0000 UTC m=+142.336075262" watchObservedRunningTime="2025-09-29 09:31:50.359507894 +0000 UTC m=+142.340831798" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.466356 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.467075 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rzc4j"] Sep 29 09:31:50 crc kubenswrapper[4779]: W0929 09:31:50.489110 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f395791_fcf5_4602_903b_06c24127b40e.slice/crio-ce92b6e15a8ce39fe772d0e51692506dd60c935b49c63056ae3d3c0a904a433d WatchSource:0}: Error finding container ce92b6e15a8ce39fe772d0e51692506dd60c935b49c63056ae3d3c0a904a433d: Status 404 returned error can't find the container with id ce92b6e15a8ce39fe772d0e51692506dd60c935b49c63056ae3d3c0a904a433d Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.550148 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:50 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:50 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:50 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.550510 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.674400 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kxkqv"] Sep 29 09:31:50 crc kubenswrapper[4779]: W0929 09:31:50.691501 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57fa213e_7e5b_451b_a6b1_d03c20a64e0f.slice/crio-a231cb18b883f08e7f6d3f834b9e670061fe3677a0d8ba31a3e8a7f9b57624bb WatchSource:0}: Error finding container a231cb18b883f08e7f6d3f834b9e670061fe3677a0d8ba31a3e8a7f9b57624bb: Status 404 returned error can't find the container with id a231cb18b883f08e7f6d3f834b9e670061fe3677a0d8ba31a3e8a7f9b57624bb Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.695187 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.726081 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-config-volume\") pod \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.726123 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-secret-volume\") pod \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.726240 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtdhr\" (UniqueName: \"kubernetes.io/projected/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-kube-api-access-mtdhr\") pod \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\" (UID: \"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d\") " Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.727228 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-config-volume" (OuterVolumeSpecName: "config-volume") pod "bbb3233c-65ea-4bb4-aaa5-1d80fef4638d" (UID: "bbb3233c-65ea-4bb4-aaa5-1d80fef4638d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.734044 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bbb3233c-65ea-4bb4-aaa5-1d80fef4638d" (UID: "bbb3233c-65ea-4bb4-aaa5-1d80fef4638d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.736725 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-kube-api-access-mtdhr" (OuterVolumeSpecName: "kube-api-access-mtdhr") pod "bbb3233c-65ea-4bb4-aaa5-1d80fef4638d" (UID: "bbb3233c-65ea-4bb4-aaa5-1d80fef4638d"). InnerVolumeSpecName "kube-api-access-mtdhr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.756012 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.827875 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.828038 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtdhr\" (UniqueName: \"kubernetes.io/projected/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-kube-api-access-mtdhr\") on node \"crc\" DevicePath \"\"" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.828052 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 09:31:50 crc kubenswrapper[4779]: I0929 09:31:50.853640 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7dw6m"] Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.331711 4779 generic.go:334] "Generic (PLEG): container finished" podID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerID="11e22128abf4144e6386a7f1e658a211669cc8409eba0c55b12cbeb95458c873" exitCode=0 Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.331754 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmf6p" event={"ID":"7db7e11b-22e3-45bf-a365-0a3583e0d52d","Type":"ContainerDied","Data":"11e22128abf4144e6386a7f1e658a211669cc8409eba0c55b12cbeb95458c873"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.331798 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmf6p" event={"ID":"7db7e11b-22e3-45bf-a365-0a3583e0d52d","Type":"ContainerStarted","Data":"80c3c2f519eaba2a5a4e5a169a68258d358c961e78eb5aa6a5a9a66c5ad7a4ae"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.345937 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 09:31:51 crc kubenswrapper[4779]: E0929 09:31:51.346202 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbb3233c-65ea-4bb4-aaa5-1d80fef4638d" containerName="collect-profiles" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.346220 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbb3233c-65ea-4bb4-aaa5-1d80fef4638d" containerName="collect-profiles" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.346338 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbb3233c-65ea-4bb4-aaa5-1d80fef4638d" containerName="collect-profiles" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.346665 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.348560 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.348893 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.351133 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.351746 4779 generic.go:334] "Generic (PLEG): container finished" podID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerID="057d182fb75de135503a3cd812d8215c9cfd3568a159737254e7b424b4d83301" exitCode=0 Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.351949 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z68v6" event={"ID":"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a","Type":"ContainerDied","Data":"057d182fb75de135503a3cd812d8215c9cfd3568a159737254e7b424b4d83301"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.354379 4779 generic.go:334] "Generic (PLEG): container finished" podID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerID="75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae" exitCode=0 Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.354441 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kxkqv" event={"ID":"57fa213e-7e5b-451b-a6b1-d03c20a64e0f","Type":"ContainerDied","Data":"75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.354467 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kxkqv" event={"ID":"57fa213e-7e5b-451b-a6b1-d03c20a64e0f","Type":"ContainerStarted","Data":"a231cb18b883f08e7f6d3f834b9e670061fe3677a0d8ba31a3e8a7f9b57624bb"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.360018 4779 generic.go:334] "Generic (PLEG): container finished" podID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerID="931731ba6ed01e26f417d2ac11ed74d44354f8324a11f75d03ba06dce945a09b" exitCode=0 Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.361147 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7dw6m" event={"ID":"06a364e4-0ea6-4fe1-bc83-7c98333e2163","Type":"ContainerDied","Data":"931731ba6ed01e26f417d2ac11ed74d44354f8324a11f75d03ba06dce945a09b"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.361191 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7dw6m" event={"ID":"06a364e4-0ea6-4fe1-bc83-7c98333e2163","Type":"ContainerStarted","Data":"ce08eb43963096fddfe4b9ce3d7d69572fc7f9a0ac133f38d1b9db3126f30a33"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.364987 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" event={"ID":"5f395791-fcf5-4602-903b-06c24127b40e","Type":"ContainerStarted","Data":"d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.365023 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" 
event={"ID":"5f395791-fcf5-4602-903b-06c24127b40e","Type":"ContainerStarted","Data":"ce92b6e15a8ce39fe772d0e51692506dd60c935b49c63056ae3d3c0a904a433d"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.365407 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.370053 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.370520 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch" event={"ID":"bbb3233c-65ea-4bb4-aaa5-1d80fef4638d","Type":"ContainerDied","Data":"c75d25670311e415bcffa080d5c415565eb50ebd831aad77ad15849e17bd8860"} Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.370535 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c75d25670311e415bcffa080d5c415565eb50ebd831aad77ad15849e17bd8860" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.425356 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" podStartSLOduration=123.425342202 podStartE2EDuration="2m3.425342202s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:31:51.424765665 +0000 UTC m=+143.406089569" watchObservedRunningTime="2025-09-29 09:31:51.425342202 +0000 UTC m=+143.406666106" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.439745 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.439923 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.533519 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:51 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:51 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:51 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.533831 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.541194 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.541374 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.541385 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.565787 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.674896 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.746023 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d2rck"] Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.747210 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.751408 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.754028 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2rck"] Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.848133 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-utilities\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.848475 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdnpg\" (UniqueName: \"kubernetes.io/projected/a21410c1-e475-491e-9298-2bbba632e116-kube-api-access-xdnpg\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.848530 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-catalog-content\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.873111 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.875642 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.878224 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-zhwns" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.950016 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-catalog-content\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.950089 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-utilities\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.950206 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdnpg\" (UniqueName: \"kubernetes.io/projected/a21410c1-e475-491e-9298-2bbba632e116-kube-api-access-xdnpg\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.951637 4779 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-catalog-content\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:51 crc kubenswrapper[4779]: I0929 09:31:51.951865 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-utilities\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.036507 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdnpg\" (UniqueName: \"kubernetes.io/projected/a21410c1-e475-491e-9298-2bbba632e116-kube-api-access-xdnpg\") pod \"redhat-marketplace-d2rck\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.072532 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.144566 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-92pg4"] Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.145532 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.176524 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-92pg4"] Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.187078 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wbk58" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.257182 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l98js\" (UniqueName: \"kubernetes.io/projected/b32e508f-1532-471b-89cc-3af8e07b50a0-kube-api-access-l98js\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.257661 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-utilities\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.257882 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-catalog-content\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.322031 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2rck"] Sep 29 09:31:52 crc kubenswrapper[4779]: W0929 09:31:52.351145 4779 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda21410c1_e475_491e_9298_2bbba632e116.slice/crio-560463f99775811d5c6f8404a886d1c9b7d2f33812d319117d8e48c8b4f29312 WatchSource:0}: Error finding container 560463f99775811d5c6f8404a886d1c9b7d2f33812d319117d8e48c8b4f29312: Status 404 returned error can't find the container with id 560463f99775811d5c6f8404a886d1c9b7d2f33812d319117d8e48c8b4f29312 Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.360504 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l98js\" (UniqueName: \"kubernetes.io/projected/b32e508f-1532-471b-89cc-3af8e07b50a0-kube-api-access-l98js\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.360593 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-utilities\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.360688 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-catalog-content\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.361686 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-catalog-content\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.363468 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-utilities\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.379584 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c9d9ebf4-f861-45c3-aef9-0abc2273c218","Type":"ContainerStarted","Data":"4cfc5efe5012787d46779c8fc744fcef981d26d1be4cbc5c6780d1166d37ce5b"} Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.381241 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2rck" event={"ID":"a21410c1-e475-491e-9298-2bbba632e116","Type":"ContainerStarted","Data":"560463f99775811d5c6f8404a886d1c9b7d2f33812d319117d8e48c8b4f29312"} Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.384187 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l98js\" (UniqueName: \"kubernetes.io/projected/b32e508f-1532-471b-89cc-3af8e07b50a0-kube-api-access-l98js\") pod \"redhat-marketplace-92pg4\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") " pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.470754 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.541223 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:52 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:52 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:52 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.541604 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.754168 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8pkd5"] Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.755340 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.756270 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8pkd5"] Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.758106 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.866997 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-utilities\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.867194 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsj7h\" (UniqueName: \"kubernetes.io/projected/e2a15850-8417-40bc-8d7a-55ad976e0e47-kube-api-access-gsj7h\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.867274 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-catalog-content\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.906237 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.906272 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.918267 4779 patch_prober.go:28] interesting pod/console-f9d7485db-8gbfr container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Sep 29 
09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.918332 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8gbfr" podUID="b61fd91c-c774-44fd-9d5e-114aa59a1b39" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.969389 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsj7h\" (UniqueName: \"kubernetes.io/projected/e2a15850-8417-40bc-8d7a-55ad976e0e47-kube-api-access-gsj7h\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.969475 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-catalog-content\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.969717 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-utilities\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.971093 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-catalog-content\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.980078 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-utilities\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:52 crc kubenswrapper[4779]: I0929 09:31:52.989542 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsj7h\" (UniqueName: \"kubernetes.io/projected/e2a15850-8417-40bc-8d7a-55ad976e0e47-kube-api-access-gsj7h\") pod \"redhat-operators-8pkd5\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.082647 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.106965 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.110563 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-92pg4"] Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.172978 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bs8sk"] Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.174449 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.196576 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-qpmlh container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.196618 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-qpmlh" podUID="d48d8d73-7021-4ebd-8321-b7f73e330c1c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.196735 4779 patch_prober.go:28] interesting pod/downloads-7954f5f757-qpmlh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.196771 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qpmlh" podUID="d48d8d73-7021-4ebd-8321-b7f73e330c1c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.199056 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-m9sp2" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.204152 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bs8sk"] Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.276839 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxv98\" (UniqueName: \"kubernetes.io/projected/81762cd7-0154-40ce-858e-fb709553ea4d-kube-api-access-rxv98\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.276918 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-catalog-content\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.277095 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-utilities\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.337683 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4xmkd" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.359457 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b2xpr" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 
09:31:53.378256 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-catalog-content\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.378349 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-utilities\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.378490 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxv98\" (UniqueName: \"kubernetes.io/projected/81762cd7-0154-40ce-858e-fb709553ea4d-kube-api-access-rxv98\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.379039 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-catalog-content\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.381731 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-utilities\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.410949 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxv98\" (UniqueName: \"kubernetes.io/projected/81762cd7-0154-40ce-858e-fb709553ea4d-kube-api-access-rxv98\") pod \"redhat-operators-bs8sk\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") " pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.412439 4779 generic.go:334] "Generic (PLEG): container finished" podID="a21410c1-e475-491e-9298-2bbba632e116" containerID="9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56" exitCode=0 Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.412584 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2rck" event={"ID":"a21410c1-e475-491e-9298-2bbba632e116","Type":"ContainerDied","Data":"9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56"} Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.423638 4779 generic.go:334] "Generic (PLEG): container finished" podID="c9d9ebf4-f861-45c3-aef9-0abc2273c218" containerID="de1253f97cc90679d99f54e9b479637ae96753831b163c66a61b93f27a54a65f" exitCode=0 Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.423725 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c9d9ebf4-f861-45c3-aef9-0abc2273c218","Type":"ContainerDied","Data":"de1253f97cc90679d99f54e9b479637ae96753831b163c66a61b93f27a54a65f"} Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.456634 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-92pg4" event={"ID":"b32e508f-1532-471b-89cc-3af8e07b50a0","Type":"ContainerStarted","Data":"813d4b8df73690034545df489a9146593831976ee9b308ee3ae311da5a1eee47"} Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.496857 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.530318 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.536132 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:53 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:53 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:53 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.536169 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.836743 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bs8sk"] Sep 29 09:31:53 crc kubenswrapper[4779]: I0929 09:31:53.867292 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8pkd5"] Sep 29 09:31:53 crc kubenswrapper[4779]: W0929 09:31:53.881020 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81762cd7_0154_40ce_858e_fb709553ea4d.slice/crio-22f9a01704dfab1ee55f22cacf8bc61a4ef686d424813b9cbd3a3ba1d9b23e37 WatchSource:0}: Error finding container 22f9a01704dfab1ee55f22cacf8bc61a4ef686d424813b9cbd3a3ba1d9b23e37: Status 404 returned error can't find the container with id 22f9a01704dfab1ee55f22cacf8bc61a4ef686d424813b9cbd3a3ba1d9b23e37 Sep 29 09:31:53 crc kubenswrapper[4779]: W0929 09:31:53.887038 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2a15850_8417_40bc_8d7a_55ad976e0e47.slice/crio-92e657e2bc7b831fb7d7e284c16d60c408bddac67d62de87db50b5d00deece26 WatchSource:0}: Error finding container 92e657e2bc7b831fb7d7e284c16d60c408bddac67d62de87db50b5d00deece26: Status 404 returned error can't find the container with id 92e657e2bc7b831fb7d7e284c16d60c408bddac67d62de87db50b5d00deece26 Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.478229 4779 generic.go:334] "Generic (PLEG): container finished" podID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerID="b67dea8ecaceff8f7064c698fac897a646ef27129468e785dd84786b25915ff8" exitCode=0 Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.478739 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92pg4" event={"ID":"b32e508f-1532-471b-89cc-3af8e07b50a0","Type":"ContainerDied","Data":"b67dea8ecaceff8f7064c698fac897a646ef27129468e785dd84786b25915ff8"} Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.483756 4779 generic.go:334] "Generic (PLEG): container finished" 
podID="81762cd7-0154-40ce-858e-fb709553ea4d" containerID="f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192" exitCode=0 Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.483921 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bs8sk" event={"ID":"81762cd7-0154-40ce-858e-fb709553ea4d","Type":"ContainerDied","Data":"f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192"} Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.483956 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bs8sk" event={"ID":"81762cd7-0154-40ce-858e-fb709553ea4d","Type":"ContainerStarted","Data":"22f9a01704dfab1ee55f22cacf8bc61a4ef686d424813b9cbd3a3ba1d9b23e37"} Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.487962 4779 generic.go:334] "Generic (PLEG): container finished" podID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerID="13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a" exitCode=0 Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.488007 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pkd5" event={"ID":"e2a15850-8417-40bc-8d7a-55ad976e0e47","Type":"ContainerDied","Data":"13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a"} Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.488049 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pkd5" event={"ID":"e2a15850-8417-40bc-8d7a-55ad976e0e47","Type":"ContainerStarted","Data":"92e657e2bc7b831fb7d7e284c16d60c408bddac67d62de87db50b5d00deece26"} Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.533512 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:54 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:54 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:54 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.533576 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.826748 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.906264 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kube-api-access\") pod \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\" (UID: \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\") " Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.906409 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kubelet-dir\") pod \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\" (UID: \"c9d9ebf4-f861-45c3-aef9-0abc2273c218\") " Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.906507 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c9d9ebf4-f861-45c3-aef9-0abc2273c218" (UID: "c9d9ebf4-f861-45c3-aef9-0abc2273c218"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.906695 4779 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 09:31:54 crc kubenswrapper[4779]: I0929 09:31:54.911475 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c9d9ebf4-f861-45c3-aef9-0abc2273c218" (UID: "c9d9ebf4-f861-45c3-aef9-0abc2273c218"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.012628 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9d9ebf4-f861-45c3-aef9-0abc2273c218-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.070262 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 09:31:55 crc kubenswrapper[4779]: E0929 09:31:55.070493 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9d9ebf4-f861-45c3-aef9-0abc2273c218" containerName="pruner" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.070505 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9d9ebf4-f861-45c3-aef9-0abc2273c218" containerName="pruner" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.070612 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9d9ebf4-f861-45c3-aef9-0abc2273c218" containerName="pruner" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.070975 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.076441 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.076636 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.077862 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.220885 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.220991 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.322027 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.322149 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.323131 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.351075 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.403048 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.525475 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c9d9ebf4-f861-45c3-aef9-0abc2273c218","Type":"ContainerDied","Data":"4cfc5efe5012787d46779c8fc744fcef981d26d1be4cbc5c6780d1166d37ce5b"} Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.525507 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cfc5efe5012787d46779c8fc744fcef981d26d1be4cbc5c6780d1166d37ce5b" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.525578 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.533334 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:55 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:55 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:55 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.533388 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:55 crc kubenswrapper[4779]: I0929 09:31:55.922462 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.532704 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:56 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:56 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:56 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.532958 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.543222 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c68ad83b-d47d-4584-8d9a-b6702d4219e4","Type":"ContainerStarted","Data":"b547acdd88aff9bb55a5b7d9104bfc93d43593df61d113fddf0aa8ee9e8e1cd0"} Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.676489 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.676632 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.677490 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.682649 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.778063 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.778113 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.782215 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.782335 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.835530 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.847384 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:31:56 crc kubenswrapper[4779]: I0929 09:31:56.856873 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 09:31:57 crc kubenswrapper[4779]: I0929 09:31:57.532593 4779 patch_prober.go:28] interesting pod/router-default-5444994796-f4jjc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 09:31:57 crc kubenswrapper[4779]: [-]has-synced failed: reason withheld Sep 29 09:31:57 crc kubenswrapper[4779]: [+]process-running ok Sep 29 09:31:57 crc kubenswrapper[4779]: healthz check failed Sep 29 09:31:57 crc kubenswrapper[4779]: I0929 09:31:57.532641 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-f4jjc" podUID="8a39cfe4-4366-4e3c-81a1-5a0694840afb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 09:31:58 crc kubenswrapper[4779]: I0929 09:31:58.533022 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:58 crc kubenswrapper[4779]: I0929 09:31:58.537563 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-f4jjc" Sep 29 09:31:58 crc kubenswrapper[4779]: I0929 09:31:58.712273 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-jx6p6" Sep 29 09:32:02 crc kubenswrapper[4779]: I0929 09:32:02.909633 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:32:02 crc kubenswrapper[4779]: I0929 09:32:02.913624 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:32:03 crc kubenswrapper[4779]: I0929 09:32:03.199115 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-qpmlh" Sep 29 09:32:03 crc kubenswrapper[4779]: I0929 09:32:03.603481 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c68ad83b-d47d-4584-8d9a-b6702d4219e4","Type":"ContainerStarted","Data":"efc9a747b87e651fb4ced070f114390a3310c8ef8fa2e7dc3fb841ee5f14114f"} Sep 29 09:32:04 crc kubenswrapper[4779]: I0929 09:32:04.616840 4779 generic.go:334] "Generic (PLEG): container finished" podID="c68ad83b-d47d-4584-8d9a-b6702d4219e4" containerID="efc9a747b87e651fb4ced070f114390a3310c8ef8fa2e7dc3fb841ee5f14114f" exitCode=0 Sep 29 09:32:04 crc kubenswrapper[4779]: I0929 09:32:04.616932 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c68ad83b-d47d-4584-8d9a-b6702d4219e4","Type":"ContainerDied","Data":"efc9a747b87e651fb4ced070f114390a3310c8ef8fa2e7dc3fb841ee5f14114f"} Sep 29 09:32:07 crc kubenswrapper[4779]: I0929 09:32:07.705060 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 09:32:07 crc kubenswrapper[4779]: I0929 09:32:07.866140 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kubelet-dir\") pod \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\" (UID: \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\") " Sep 29 09:32:07 crc kubenswrapper[4779]: I0929 09:32:07.866242 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kube-api-access\") pod \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\" (UID: \"c68ad83b-d47d-4584-8d9a-b6702d4219e4\") " Sep 29 09:32:07 crc kubenswrapper[4779]: I0929 09:32:07.866331 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c68ad83b-d47d-4584-8d9a-b6702d4219e4" (UID: "c68ad83b-d47d-4584-8d9a-b6702d4219e4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:32:07 crc kubenswrapper[4779]: I0929 09:32:07.867231 4779 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 09:32:07 crc kubenswrapper[4779]: I0929 09:32:07.870890 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c68ad83b-d47d-4584-8d9a-b6702d4219e4" (UID: "c68ad83b-d47d-4584-8d9a-b6702d4219e4"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:32:07 crc kubenswrapper[4779]: I0929 09:32:07.968702 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c68ad83b-d47d-4584-8d9a-b6702d4219e4-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 09:32:08 crc kubenswrapper[4779]: I0929 09:32:08.640576 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c68ad83b-d47d-4584-8d9a-b6702d4219e4","Type":"ContainerDied","Data":"b547acdd88aff9bb55a5b7d9104bfc93d43593df61d113fddf0aa8ee9e8e1cd0"} Sep 29 09:32:08 crc kubenswrapper[4779]: I0929 09:32:08.640612 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b547acdd88aff9bb55a5b7d9104bfc93d43593df61d113fddf0aa8ee9e8e1cd0" Sep 29 09:32:08 crc kubenswrapper[4779]: I0929 09:32:08.641129 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 29 09:32:10 crc kubenswrapper[4779]: I0929 09:32:10.093011 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:32:10 crc kubenswrapper[4779]: I0929 09:32:10.098787 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/294a4484-da93-4c37-9ecf-18f68f4ad64d-metrics-certs\") pod \"network-metrics-daemon-qvlbd\" (UID: \"294a4484-da93-4c37-9ecf-18f68f4ad64d\") " pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:32:10 crc kubenswrapper[4779]: I0929 09:32:10.187561 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j"
Sep 29 09:32:10 crc kubenswrapper[4779]: I0929 09:32:10.328963 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvlbd"
Sep 29 09:32:16 crc kubenswrapper[4779]: E0929 09:32:16.803434 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Sep 29 09:32:16 crc kubenswrapper[4779]: E0929 09:32:16.804072 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8p7w2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-z68v6_openshift-marketplace(d480d7e4-9bc7-40ed-ab03-d091d67a7a9a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 09:32:16 crc kubenswrapper[4779]: E0929 09:32:16.805288 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-z68v6" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a"
Sep 29 09:32:16 crc kubenswrapper[4779]: I0929 09:32:16.966689 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:32:16 crc kubenswrapper[4779]: I0929 09:32:16.966758 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:32:17 crc kubenswrapper[4779]: E0929 09:32:17.729584 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Sep 29 09:32:17 crc kubenswrapper[4779]: E0929 09:32:17.730092 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m9xsj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-kxkqv_openshift-marketplace(57fa213e-7e5b-451b-a6b1-d03c20a64e0f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 09:32:17 crc kubenswrapper[4779]: E0929 09:32:17.731350 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-kxkqv" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f"
Sep 29 09:32:18 crc kubenswrapper[4779]: E0929 09:32:18.190337 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-z68v6" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a"
Sep 29 09:32:18 crc kubenswrapper[4779]: E0929 09:32:18.253822 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 29 09:32:18 crc kubenswrapper[4779]: E0929 09:32:18.253981 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l98js,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-92pg4_openshift-marketplace(b32e508f-1532-471b-89cc-3af8e07b50a0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 09:32:18 crc kubenswrapper[4779]: E0929 09:32:18.255122 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-92pg4" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0"
Sep 29 09:32:18 crc kubenswrapper[4779]: E0929 09:32:18.273196 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 29 09:32:18 crc kubenswrapper[4779]: E0929 09:32:18.273456 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xdnpg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-d2rck_openshift-marketplace(a21410c1-e475-491e-9298-2bbba632e116): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 09:32:18 crc kubenswrapper[4779]: E0929 09:32:18.274638 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-d2rck" podUID="a21410c1-e475-491e-9298-2bbba632e116"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.365077 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-92pg4" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.365253 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-d2rck" podUID="a21410c1-e475-491e-9298-2bbba632e116"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.365282 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-kxkqv" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.450662 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.450766 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2p6jj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-rmf6p_openshift-marketplace(7db7e11b-22e3-45bf-a365-0a3583e0d52d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.453127 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-rmf6p" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.463149 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.463299 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6xcmd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7dw6m_openshift-marketplace(06a364e4-0ea6-4fe1-bc83-7c98333e2163): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.464596 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-7dw6m" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.466033 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.466131 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rxv98,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-bs8sk_openshift-marketplace(81762cd7-0154-40ce-858e-fb709553ea4d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.467190 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-bs8sk" podUID="81762cd7-0154-40ce-858e-fb709553ea4d"
Sep 29 09:32:19 crc kubenswrapper[4779]: W0929 09:32:19.647575 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-451bbe48c80352cff2d99a24b3c1a116bc5ef46fdc96a9469f45d70ab7ab0d2a WatchSource:0}: Error finding container 451bbe48c80352cff2d99a24b3c1a116bc5ef46fdc96a9469f45d70ab7ab0d2a: Status 404 returned error can't find the container with id 451bbe48c80352cff2d99a24b3c1a116bc5ef46fdc96a9469f45d70ab7ab0d2a
Sep 29 09:32:19 crc kubenswrapper[4779]: I0929 09:32:19.713072 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"451bbe48c80352cff2d99a24b3c1a116bc5ef46fdc96a9469f45d70ab7ab0d2a"}
Sep 29 09:32:19 crc kubenswrapper[4779]: I0929 09:32:19.714872 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pkd5" event={"ID":"e2a15850-8417-40bc-8d7a-55ad976e0e47","Type":"ContainerStarted","Data":"b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295"}
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.717497 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7dw6m" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.717643 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-rmf6p" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d"
Sep 29 09:32:19 crc kubenswrapper[4779]: E0929 09:32:19.723656 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-bs8sk" podUID="81762cd7-0154-40ce-858e-fb709553ea4d"
Sep 29 09:32:19 crc kubenswrapper[4779]: W0929 09:32:19.794803 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-983c11fb5a854717af7b1970384bb071f0e5348419bf74c4677ca09b384f741b WatchSource:0}: Error finding container 983c11fb5a854717af7b1970384bb071f0e5348419bf74c4677ca09b384f741b: Status 404 returned error can't find the container with id 983c11fb5a854717af7b1970384bb071f0e5348419bf74c4677ca09b384f741b
Sep 29 09:32:19 crc kubenswrapper[4779]: I0929 09:32:19.871354 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-qvlbd"]
Sep 29 09:32:19 crc kubenswrapper[4779]: W0929 09:32:19.882172 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod294a4484_da93_4c37_9ecf_18f68f4ad64d.slice/crio-a3b412fd28916bac795ee972061a59ff481a4cbf9a15183495c3018d7c60b5fe WatchSource:0}: Error finding container a3b412fd28916bac795ee972061a59ff481a4cbf9a15183495c3018d7c60b5fe: Status 404 returned error can't find the container with id a3b412fd28916bac795ee972061a59ff481a4cbf9a15183495c3018d7c60b5fe
Sep 29 09:32:19 crc kubenswrapper[4779]: W0929 09:32:19.885029 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-bf8ad45b74e7227be11a60fbdee5b54f88476877b0f3e01ad3790cf31b5c750c WatchSource:0}: Error finding container bf8ad45b74e7227be11a60fbdee5b54f88476877b0f3e01ad3790cf31b5c750c: Status 404 returned error can't find the container with id bf8ad45b74e7227be11a60fbdee5b54f88476877b0f3e01ad3790cf31b5c750c
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.722352 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"fa3ba3c2bde67444b067cb3f003e359824aee49b93a74649ff55d759c3f73f7d"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.723006 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"983c11fb5a854717af7b1970384bb071f0e5348419bf74c4677ca09b384f741b"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.724417 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"4563e1ffa71e8865a9fb18f6261b28868a3aa66f99ddf0c53a6e626c1ec8dde4"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.727115 4779 generic.go:334] "Generic (PLEG): container finished" podID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerID="b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295" exitCode=0
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.727211 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pkd5" event={"ID":"e2a15850-8417-40bc-8d7a-55ad976e0e47","Type":"ContainerDied","Data":"b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.729470 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" event={"ID":"294a4484-da93-4c37-9ecf-18f68f4ad64d","Type":"ContainerStarted","Data":"5859b9bead858a83190e105d47c3e398184260803cbe70a5d8294b96706efdb5"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.729504 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" event={"ID":"294a4484-da93-4c37-9ecf-18f68f4ad64d","Type":"ContainerStarted","Data":"0eebd8cf453f270786476657a4fe0e609a2f340379f27ba0b853b8cfeda0e1d8"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.729515 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qvlbd" event={"ID":"294a4484-da93-4c37-9ecf-18f68f4ad64d","Type":"ContainerStarted","Data":"a3b412fd28916bac795ee972061a59ff481a4cbf9a15183495c3018d7c60b5fe"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.732828 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"bc657807cfdcaea509207226cd68a70a1c4428e953ec90f81c845d7e99c6eac3"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.732860 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"bf8ad45b74e7227be11a60fbdee5b54f88476877b0f3e01ad3790cf31b5c750c"}
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.733094 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 09:32:20 crc kubenswrapper[4779]: I0929 09:32:20.795879 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-qvlbd" podStartSLOduration=152.795858259 podStartE2EDuration="2m32.795858259s" podCreationTimestamp="2025-09-29 09:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:32:20.794104625 +0000 UTC m=+172.775428529" watchObservedRunningTime="2025-09-29 09:32:20.795858259 +0000 UTC m=+172.777182173"
Sep 29 09:32:21 crc kubenswrapper[4779]: I0929 09:32:21.743797 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pkd5" event={"ID":"e2a15850-8417-40bc-8d7a-55ad976e0e47","Type":"ContainerStarted","Data":"ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c"}
Sep 29 09:32:21 crc kubenswrapper[4779]: I0929 09:32:21.767991 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8pkd5" podStartSLOduration=2.979336897 podStartE2EDuration="29.767968906s" podCreationTimestamp="2025-09-29 09:31:52 +0000 UTC" firstStartedPulling="2025-09-29 09:31:54.491599207 +0000 UTC m=+146.472923111" lastFinishedPulling="2025-09-29 09:32:21.280231196 +0000 UTC m=+173.261555120" observedRunningTime="2025-09-29 09:32:21.764563541 +0000 UTC m=+173.745887485" watchObservedRunningTime="2025-09-29 09:32:21.767968906 +0000 UTC m=+173.749292840"
Sep 29 09:32:23 crc kubenswrapper[4779]: I0929 09:32:23.083934 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8pkd5"
Sep 29 09:32:23 crc kubenswrapper[4779]: I0929 09:32:23.084452 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8pkd5"
Sep 29 09:32:23 crc kubenswrapper[4779]: I0929 09:32:23.728827 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qwpch"
Sep 29 09:32:24 crc kubenswrapper[4779]: I0929 09:32:24.223454 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8pkd5" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="registry-server" probeResult="failure" output=<
Sep 29 09:32:24 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s
Sep 29 09:32:24 crc kubenswrapper[4779]: >
Sep 29 09:32:30 crc kubenswrapper[4779]: I0929 09:32:30.805847 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z68v6" event={"ID":"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a","Type":"ContainerStarted","Data":"91cc7b454fe6dddf4e4eb073fe4aa38eef8d68a13022deee2df7c3921107d8ce"}
Sep 29 09:32:31 crc kubenswrapper[4779]: I0929 09:32:31.812014 4779 generic.go:334] "Generic (PLEG): container finished" podID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerID="91cc7b454fe6dddf4e4eb073fe4aa38eef8d68a13022deee2df7c3921107d8ce" exitCode=0
Sep 29 09:32:31 crc kubenswrapper[4779]: I0929 09:32:31.812073 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z68v6" event={"ID":"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a","Type":"ContainerDied","Data":"91cc7b454fe6dddf4e4eb073fe4aa38eef8d68a13022deee2df7c3921107d8ce"}
Sep 29 09:32:32 crc kubenswrapper[4779]: I0929 09:32:32.819040 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z68v6" event={"ID":"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a","Type":"ContainerStarted","Data":"a1ed42357c550a1f773b7b75502324a32c8a15da49a6d36bf7c4984def499011"}
Sep 29 09:32:32 crc kubenswrapper[4779]: I0929 09:32:32.821157 4779 generic.go:334] "Generic (PLEG): container finished" podID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerID="37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75" exitCode=0
Sep 29 09:32:32 crc kubenswrapper[4779]: I0929 09:32:32.821221 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kxkqv" event={"ID":"57fa213e-7e5b-451b-a6b1-d03c20a64e0f","Type":"ContainerDied","Data":"37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75"}
Sep 29 09:32:32 crc kubenswrapper[4779]: I0929 09:32:32.823323 4779 generic.go:334] "Generic (PLEG): container finished" podID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerID="39ede4e4c5862e2cd536fd342922353eda934e4c96437a743b1355033a10bb2b" exitCode=0
Sep 29 09:32:32 crc kubenswrapper[4779]: I0929 09:32:32.823350 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92pg4" event={"ID":"b32e508f-1532-471b-89cc-3af8e07b50a0","Type":"ContainerDied","Data":"39ede4e4c5862e2cd536fd342922353eda934e4c96437a743b1355033a10bb2b"}
Sep 29 09:32:32 crc kubenswrapper[4779]: I0929 09:32:32.856604 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z68v6" podStartSLOduration=1.962329001 podStartE2EDuration="43.856584713s" podCreationTimestamp="2025-09-29 09:31:49 +0000 UTC" firstStartedPulling="2025-09-29 09:31:50.333314281 +0000 UTC m=+142.314638185" lastFinishedPulling="2025-09-29 09:32:32.227569993 +0000 UTC m=+184.208893897" observedRunningTime="2025-09-29 09:32:32.8391751 +0000 UTC m=+184.820499004" watchObservedRunningTime="2025-09-29 09:32:32.856584713 +0000 UTC m=+184.837908617"
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.132756 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8pkd5"
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.175759 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8pkd5"
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.845930 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bs8sk" event={"ID":"81762cd7-0154-40ce-858e-fb709553ea4d","Type":"ContainerStarted","Data":"dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c"}
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.849531 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kxkqv" event={"ID":"57fa213e-7e5b-451b-a6b1-d03c20a64e0f","Type":"ContainerStarted","Data":"d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e"}
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.854724 4779 generic.go:334] "Generic (PLEG): container finished" podID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerID="dda5b30a1b7f0a619784bfb2e1806fa50d9acb5c770d84639121af48eea1bbdf" exitCode=0
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.854810 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7dw6m" event={"ID":"06a364e4-0ea6-4fe1-bc83-7c98333e2163","Type":"ContainerDied","Data":"dda5b30a1b7f0a619784bfb2e1806fa50d9acb5c770d84639121af48eea1bbdf"}
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.857030 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92pg4" event={"ID":"b32e508f-1532-471b-89cc-3af8e07b50a0","Type":"ContainerStarted","Data":"f67dc6e02e56360246078e64b4462b41e541fb91bf0297cdf4d4ba7c5cbbf161"}
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.858688 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmf6p" event={"ID":"7db7e11b-22e3-45bf-a365-0a3583e0d52d","Type":"ContainerStarted","Data":"6f0c020b15aa3dd056bf86fb132eb7d348d0f83a55d3a9432221c89174ff24c2"}
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.911876 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-92pg4" podStartSLOduration=3.071890244 podStartE2EDuration="41.911851529s" podCreationTimestamp="2025-09-29 09:31:52 +0000 UTC" firstStartedPulling="2025-09-29 09:31:54.480448505 +0000 UTC m=+146.461772409" lastFinishedPulling="2025-09-29 09:32:33.32040978 +0000 UTC m=+185.301733694" observedRunningTime="2025-09-29 09:32:33.908755614 +0000 UTC m=+185.890079528" watchObservedRunningTime="2025-09-29 09:32:33.911851529 +0000 UTC m=+185.893175433"
Sep 29 09:32:33 crc kubenswrapper[4779]: I0929 09:32:33.946266 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kxkqv" podStartSLOduration=2.945573279 podStartE2EDuration="44.946249603s" podCreationTimestamp="2025-09-29 09:31:49 +0000 UTC" firstStartedPulling="2025-09-29 09:31:51.356567195 +0000 UTC m=+143.337891139" lastFinishedPulling="2025-09-29 09:32:33.357243559 +0000 UTC m=+185.338567463" observedRunningTime="2025-09-29 09:32:33.946029986 +0000 UTC m=+185.927353900" watchObservedRunningTime="2025-09-29 09:32:33.946249603 +0000 UTC m=+185.927573507"
Sep 29 09:32:34 crc kubenswrapper[4779]: I0929 09:32:34.864816 4779 generic.go:334] "Generic (PLEG): container finished" podID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerID="6f0c020b15aa3dd056bf86fb132eb7d348d0f83a55d3a9432221c89174ff24c2" exitCode=0
Sep 29 09:32:34 crc kubenswrapper[4779]: I0929 09:32:34.865005 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmf6p" event={"ID":"7db7e11b-22e3-45bf-a365-0a3583e0d52d","Type":"ContainerDied","Data":"6f0c020b15aa3dd056bf86fb132eb7d348d0f83a55d3a9432221c89174ff24c2"}
Sep 29 09:32:34 crc kubenswrapper[4779]: I0929 09:32:34.870972 4779 generic.go:334] "Generic (PLEG): container finished" podID="81762cd7-0154-40ce-858e-fb709553ea4d" containerID="dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c" exitCode=0
Sep 29 09:32:34 crc kubenswrapper[4779]: I0929 09:32:34.871066 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bs8sk" event={"ID":"81762cd7-0154-40ce-858e-fb709553ea4d","Type":"ContainerDied","Data":"dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c"}
Sep 29 09:32:34 crc kubenswrapper[4779]: I0929 09:32:34.881432 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7dw6m" event={"ID":"06a364e4-0ea6-4fe1-bc83-7c98333e2163","Type":"ContainerStarted","Data":"09c89c8f8c7941359b61ddcc5ec2c0027b26c01929d6bd19eb98610442a03859"}
Sep 29 09:32:34 crc kubenswrapper[4779]: I0929 09:32:34.911224 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7dw6m" podStartSLOduration=1.908761089 podStartE2EDuration="44.911204879s" podCreationTimestamp="2025-09-29 09:31:50 +0000 UTC" firstStartedPulling="2025-09-29 09:31:51.363747485 +0000 UTC m=+143.345071419" lastFinishedPulling="2025-09-29 09:32:34.366191305 +0000 UTC m=+186.347515209" observedRunningTime="2025-09-29 09:32:34.910887879 +0000 UTC m=+186.892211803" watchObservedRunningTime="2025-09-29 09:32:34.911204879 +0000 UTC m=+186.892528783"
Sep 29 09:32:35 crc kubenswrapper[4779]: I0929 09:32:35.901302 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmf6p" event={"ID":"7db7e11b-22e3-45bf-a365-0a3583e0d52d","Type":"ContainerStarted","Data":"5e92500e82c512735102892bc85994c566b09dcd679c4d69f13b8a72df84371f"}
Sep 29 09:32:35 crc kubenswrapper[4779]: I0929 09:32:35.903789 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bs8sk" event={"ID":"81762cd7-0154-40ce-858e-fb709553ea4d","Type":"ContainerStarted","Data":"f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b"}
Sep 29 09:32:35 crc kubenswrapper[4779]: I0929 09:32:35.908954 4779 generic.go:334] "Generic (PLEG): container finished" podID="a21410c1-e475-491e-9298-2bbba632e116" containerID="6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a" exitCode=0
Sep 29 09:32:35 crc kubenswrapper[4779]: I0929 09:32:35.908995 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2rck" event={"ID":"a21410c1-e475-491e-9298-2bbba632e116","Type":"ContainerDied","Data":"6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a"}
Sep 29 09:32:35 crc kubenswrapper[4779]: I0929 09:32:35.924570 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rmf6p" podStartSLOduration=2.794497118 podStartE2EDuration="46.924553539s" podCreationTimestamp="2025-09-29 09:31:49 +0000 UTC" firstStartedPulling="2025-09-29 09:31:51.338374428 +0000 UTC m=+143.319698332" lastFinishedPulling="2025-09-29 09:32:35.468430849 +0000 UTC m=+187.449754753" observedRunningTime="2025-09-29 09:32:35.921877097 +0000 UTC m=+187.903201011" watchObservedRunningTime="2025-09-29 09:32:35.924553539 +0000 UTC m=+187.905877443"
Sep 29 09:32:35 crc kubenswrapper[4779]: I0929 09:32:35.951483 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bs8sk" podStartSLOduration=2.16313513 podStartE2EDuration="42.951466744s" podCreationTimestamp="2025-09-29 09:31:53 +0000 UTC" firstStartedPulling="2025-09-29 09:31:54.485766058 +0000 UTC m=+146.467089962" lastFinishedPulling="2025-09-29 09:32:35.274097672 +0000 UTC m=+187.255421576" observedRunningTime="2025-09-29 09:32:35.948866945 +0000 UTC m=+187.930190879" watchObservedRunningTime="2025-09-29 09:32:35.951466744 +0000 UTC m=+187.932790648"
Sep 29 09:32:36 crc kubenswrapper[4779]: I0929 09:32:36.915115 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2rck" event={"ID":"a21410c1-e475-491e-9298-2bbba632e116","Type":"ContainerStarted","Data":"ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da"}
Sep 29 09:32:36 crc kubenswrapper[4779]: I0929 09:32:36.932577 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d2rck" podStartSLOduration=2.982694759 podStartE2EDuration="45.932559846s" podCreationTimestamp="2025-09-29 09:31:51 +0000 UTC" firstStartedPulling="2025-09-29 09:31:53.41766936 +0000 UTC m=+145.398993264" lastFinishedPulling="2025-09-29 09:32:36.367534447 +0000 UTC m=+188.348858351" observedRunningTime="2025-09-29 09:32:36.92941129 +0000 UTC m=+188.910735214" watchObservedRunningTime="2025-09-29 09:32:36.932559846 +0000 UTC m=+188.913883750"
Sep 29 09:32:39 crc kubenswrapper[4779]: I0929 09:32:39.875040 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z68v6"
Sep 29 09:32:39 crc kubenswrapper[4779]: I0929 09:32:39.875493 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z68v6"
Sep 29 09:32:39 crc kubenswrapper[4779]: I0929 09:32:39.912834 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z68v6"
Sep 29 09:32:39 crc kubenswrapper[4779]: I0929 09:32:39.976459 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z68v6"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.067369 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rmf6p"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.067415 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rmf6p"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.123564 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rmf6p"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.277873 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kxkqv"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.277947 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kxkqv"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.323534 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kxkqv"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.467258 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7dw6m"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.467307 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7dw6m"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.520264 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7dw6m"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.974186 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7dw6m"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.976487 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kxkqv"
Sep 29 09:32:40 crc kubenswrapper[4779]: I0929 09:32:40.983122 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rmf6p"
Sep 29 09:32:41 crc kubenswrapper[4779]: I0929 09:32:41.769138 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dvvh4"]
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.072942 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d2rck"
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.072991 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d2rck"
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.108205 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d2rck"
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.455081 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kxkqv"]
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.471788 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-92pg4"
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.471843 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-92pg4"
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.532991 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-92pg4"
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.942793 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kxkqv" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerName="registry-server" containerID="cri-o://d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e" gracePeriod=2
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.982544 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d2rck"
Sep 29 09:32:42 crc kubenswrapper[4779]: I0929 09:32:42.995709 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-92pg4"
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.308521 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kxkqv"
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.382863 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9xsj\" (UniqueName: \"kubernetes.io/projected/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-kube-api-access-m9xsj\") pod \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") "
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.384435 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-utilities\") pod \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") "
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.384579 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-catalog-content\") pod \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\" (UID: \"57fa213e-7e5b-451b-a6b1-d03c20a64e0f\") "
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.385950 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-utilities" (OuterVolumeSpecName: "utilities") pod "57fa213e-7e5b-451b-a6b1-d03c20a64e0f" (UID: "57fa213e-7e5b-451b-a6b1-d03c20a64e0f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.389898 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-kube-api-access-m9xsj" (OuterVolumeSpecName: "kube-api-access-m9xsj") pod "57fa213e-7e5b-451b-a6b1-d03c20a64e0f" (UID: "57fa213e-7e5b-451b-a6b1-d03c20a64e0f"). InnerVolumeSpecName "kube-api-access-m9xsj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.486014 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9xsj\" (UniqueName: \"kubernetes.io/projected/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-kube-api-access-m9xsj\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.486055 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.497820 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bs8sk"
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.497853 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bs8sk"
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.546089 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bs8sk"
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.551844 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57fa213e-7e5b-451b-a6b1-d03c20a64e0f" (UID: "57fa213e-7e5b-451b-a6b1-d03c20a64e0f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.587424 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57fa213e-7e5b-451b-a6b1-d03c20a64e0f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.839926 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-92pg4"]
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.948452 4779 generic.go:334] "Generic (PLEG): container finished" podID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerID="d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e" exitCode=0
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.948499 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kxkqv"
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.948516 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kxkqv" event={"ID":"57fa213e-7e5b-451b-a6b1-d03c20a64e0f","Type":"ContainerDied","Data":"d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e"}
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.948852 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kxkqv" event={"ID":"57fa213e-7e5b-451b-a6b1-d03c20a64e0f","Type":"ContainerDied","Data":"a231cb18b883f08e7f6d3f834b9e670061fe3677a0d8ba31a3e8a7f9b57624bb"}
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.948930 4779 scope.go:117] "RemoveContainer" containerID="d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e"
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.974626 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kxkqv"]
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.979187 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kxkqv"]
Sep 29 09:32:43 crc kubenswrapper[4779]: I0929 09:32:43.990779 4779 scope.go:117] "RemoveContainer" containerID="37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.003791 4779 scope.go:117] "RemoveContainer" containerID="75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.014304 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bs8sk"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.025051 4779 scope.go:117] "RemoveContainer" containerID="d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e"
Sep 29 09:32:44 crc kubenswrapper[4779]: E0929 09:32:44.025409 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e\": container with ID starting with d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e not found: ID does not exist" containerID="d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.025441 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e"} err="failed to get container status \"d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e\": rpc error: code = NotFound desc = could not find container \"d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e\": container with ID starting with d985e097bdb5f50e009ca4ba16564e56af1450f3c296bd923ff1cf4f5e44894e not found: ID does not exist"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.025515 4779 scope.go:117] "RemoveContainer" containerID="37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75"
Sep 29 09:32:44 crc kubenswrapper[4779]: E0929 09:32:44.025758 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75\": container with ID starting with 37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75 not found: ID does not exist" containerID="37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.025802 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75"} err="failed to get container status \"37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75\": rpc error: code = NotFound desc = could not find container \"37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75\": container with ID starting with 37055ea06b05a121c1a0ee192fb1db7ddf7cfc6089a41bf58993a2c3f54c9b75 not found: ID does not exist"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.025816 4779 scope.go:117] "RemoveContainer" containerID="75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae"
Sep 29 09:32:44 crc kubenswrapper[4779]: E0929 09:32:44.026040 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae\": container with ID starting with 75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae not found: ID does not exist" containerID="75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.026085 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae"} err="failed to get container status \"75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae\": rpc error: code = NotFound desc = could not find container \"75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae\": container with ID starting with 75e695d4e2f6484fb919653b558c8e6e9087211b848b73e730785f30d7c716ae not found: ID does not exist"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.720717 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" path="/var/lib/kubelet/pods/57fa213e-7e5b-451b-a6b1-d03c20a64e0f/volumes"
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.841176 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7dw6m"]
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.841463 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7dw6m" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerName="registry-server" containerID="cri-o://09c89c8f8c7941359b61ddcc5ec2c0027b26c01929d6bd19eb98610442a03859" gracePeriod=2
Sep 29 09:32:44 crc kubenswrapper[4779]: I0929 09:32:44.957278 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-92pg4" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerName="registry-server" containerID="cri-o://f67dc6e02e56360246078e64b4462b41e541fb91bf0297cdf4d4ba7c5cbbf161" gracePeriod=2
Sep 29 09:32:45 crc kubenswrapper[4779]: I0929 09:32:45.963866 4779 generic.go:334] "Generic (PLEG): container finished" podID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerID="09c89c8f8c7941359b61ddcc5ec2c0027b26c01929d6bd19eb98610442a03859" exitCode=0
Sep 29 09:32:45 crc kubenswrapper[4779]: I0929 09:32:45.963963 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7dw6m" event={"ID":"06a364e4-0ea6-4fe1-bc83-7c98333e2163","Type":"ContainerDied","Data":"09c89c8f8c7941359b61ddcc5ec2c0027b26c01929d6bd19eb98610442a03859"}
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.007008 4779 generic.go:334] "Generic (PLEG): container finished" podID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerID="f67dc6e02e56360246078e64b4462b41e541fb91bf0297cdf4d4ba7c5cbbf161" exitCode=0
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.007065 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92pg4" event={"ID":"b32e508f-1532-471b-89cc-3af8e07b50a0","Type":"ContainerDied","Data":"f67dc6e02e56360246078e64b4462b41e541fb91bf0297cdf4d4ba7c5cbbf161"}
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.242053 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bs8sk"]
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.242514 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bs8sk" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" containerName="registry-server" containerID="cri-o://f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b" gracePeriod=2
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.421736 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7dw6m"
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.528001 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xcmd\" (UniqueName: \"kubernetes.io/projected/06a364e4-0ea6-4fe1-bc83-7c98333e2163-kube-api-access-6xcmd\") pod \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.528304 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-utilities\") pod \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.528366 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-catalog-content\") pod \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\" (UID: \"06a364e4-0ea6-4fe1-bc83-7c98333e2163\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.529005 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-utilities" (OuterVolumeSpecName: "utilities") pod "06a364e4-0ea6-4fe1-bc83-7c98333e2163" (UID: "06a364e4-0ea6-4fe1-bc83-7c98333e2163"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.545131 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a364e4-0ea6-4fe1-bc83-7c98333e2163-kube-api-access-6xcmd" (OuterVolumeSpecName: "kube-api-access-6xcmd") pod "06a364e4-0ea6-4fe1-bc83-7c98333e2163" (UID: "06a364e4-0ea6-4fe1-bc83-7c98333e2163"). InnerVolumeSpecName "kube-api-access-6xcmd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.590566 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06a364e4-0ea6-4fe1-bc83-7c98333e2163" (UID: "06a364e4-0ea6-4fe1-bc83-7c98333e2163"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.611166 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-92pg4"
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.617391 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bs8sk"
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.630160 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.630202 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a364e4-0ea6-4fe1-bc83-7c98333e2163-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.630215 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xcmd\" (UniqueName: \"kubernetes.io/projected/06a364e4-0ea6-4fe1-bc83-7c98333e2163-kube-api-access-6xcmd\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.730531 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-catalog-content\") pod \"81762cd7-0154-40ce-858e-fb709553ea4d\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.730647 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxv98\" (UniqueName: \"kubernetes.io/projected/81762cd7-0154-40ce-858e-fb709553ea4d-kube-api-access-rxv98\") pod \"81762cd7-0154-40ce-858e-fb709553ea4d\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.730709 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l98js\" (UniqueName: \"kubernetes.io/projected/b32e508f-1532-471b-89cc-3af8e07b50a0-kube-api-access-l98js\") pod \"b32e508f-1532-471b-89cc-3af8e07b50a0\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.730793 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-utilities\") pod \"81762cd7-0154-40ce-858e-fb709553ea4d\" (UID: \"81762cd7-0154-40ce-858e-fb709553ea4d\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.730847 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-utilities\") pod \"b32e508f-1532-471b-89cc-3af8e07b50a0\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.730879 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-catalog-content\") pod \"b32e508f-1532-471b-89cc-3af8e07b50a0\" (UID: \"b32e508f-1532-471b-89cc-3af8e07b50a0\") "
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.731557 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-utilities" (OuterVolumeSpecName: "utilities") pod "b32e508f-1532-471b-89cc-3af8e07b50a0" (UID: "b32e508f-1532-471b-89cc-3af8e07b50a0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.731729 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-utilities" (OuterVolumeSpecName: "utilities") pod "81762cd7-0154-40ce-858e-fb709553ea4d" (UID: "81762cd7-0154-40ce-858e-fb709553ea4d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.733405 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81762cd7-0154-40ce-858e-fb709553ea4d-kube-api-access-rxv98" (OuterVolumeSpecName: "kube-api-access-rxv98") pod "81762cd7-0154-40ce-858e-fb709553ea4d" (UID: "81762cd7-0154-40ce-858e-fb709553ea4d"). InnerVolumeSpecName "kube-api-access-rxv98". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.733721 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b32e508f-1532-471b-89cc-3af8e07b50a0-kube-api-access-l98js" (OuterVolumeSpecName: "kube-api-access-l98js") pod "b32e508f-1532-471b-89cc-3af8e07b50a0" (UID: "b32e508f-1532-471b-89cc-3af8e07b50a0"). InnerVolumeSpecName "kube-api-access-l98js". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.736403 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.736431 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxv98\" (UniqueName: \"kubernetes.io/projected/81762cd7-0154-40ce-858e-fb709553ea4d-kube-api-access-rxv98\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.736457 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l98js\" (UniqueName: \"kubernetes.io/projected/b32e508f-1532-471b-89cc-3af8e07b50a0-kube-api-access-l98js\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.736481 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.747620 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b32e508f-1532-471b-89cc-3af8e07b50a0" (UID: "b32e508f-1532-471b-89cc-3af8e07b50a0"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.830273 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81762cd7-0154-40ce-858e-fb709553ea4d" (UID: "81762cd7-0154-40ce-858e-fb709553ea4d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.838177 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b32e508f-1532-471b-89cc-3af8e07b50a0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.838217 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81762cd7-0154-40ce-858e-fb709553ea4d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.966337 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:32:46 crc kubenswrapper[4779]: I0929 09:32:46.966388 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.015266 4779 generic.go:334] "Generic (PLEG): container finished" podID="81762cd7-0154-40ce-858e-fb709553ea4d" containerID="f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b" exitCode=0 Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.015342 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bs8sk" event={"ID":"81762cd7-0154-40ce-858e-fb709553ea4d","Type":"ContainerDied","Data":"f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b"} Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.015383 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bs8sk" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.015859 4779 scope.go:117] "RemoveContainer" containerID="f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.015834 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bs8sk" event={"ID":"81762cd7-0154-40ce-858e-fb709553ea4d","Type":"ContainerDied","Data":"22f9a01704dfab1ee55f22cacf8bc61a4ef686d424813b9cbd3a3ba1d9b23e37"} Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.019409 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7dw6m" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.019408 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7dw6m" event={"ID":"06a364e4-0ea6-4fe1-bc83-7c98333e2163","Type":"ContainerDied","Data":"ce08eb43963096fddfe4b9ce3d7d69572fc7f9a0ac133f38d1b9db3126f30a33"} Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.022466 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92pg4" event={"ID":"b32e508f-1532-471b-89cc-3af8e07b50a0","Type":"ContainerDied","Data":"813d4b8df73690034545df489a9146593831976ee9b308ee3ae311da5a1eee47"} Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.022581 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-92pg4" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.040246 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7dw6m"] Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.043077 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7dw6m"] Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.045553 4779 scope.go:117] "RemoveContainer" containerID="dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.055486 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bs8sk"] Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.062459 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bs8sk"] Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.064832 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-92pg4"] Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.066978 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-92pg4"] Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.078604 4779 scope.go:117] "RemoveContainer" containerID="f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.103452 4779 scope.go:117] "RemoveContainer" containerID="f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b" Sep 29 09:32:47 crc kubenswrapper[4779]: E0929 09:32:47.103932 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b\": container with ID starting with f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b not found: ID does not exist" containerID="f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.103986 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b"} err="failed to get container status \"f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b\": rpc error: code = NotFound desc = could not find container \"f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b\": container with ID starting with f2fee8927abe93d0200464bbca3a3ff105da69cd3237c8c9ce148bb904a8b14b not found: ID does not exist" Sep 29 
09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.104022 4779 scope.go:117] "RemoveContainer" containerID="dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c" Sep 29 09:32:47 crc kubenswrapper[4779]: E0929 09:32:47.104333 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c\": container with ID starting with dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c not found: ID does not exist" containerID="dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.104365 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c"} err="failed to get container status \"dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c\": rpc error: code = NotFound desc = could not find container \"dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c\": container with ID starting with dca8bb4e86b023c1845067b759e53d111b88c379147d4457342eda88458bbf5c not found: ID does not exist" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.104390 4779 scope.go:117] "RemoveContainer" containerID="f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192" Sep 29 09:32:47 crc kubenswrapper[4779]: E0929 09:32:47.104784 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192\": container with ID starting with f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192 not found: ID does not exist" containerID="f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.104859 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192"} err="failed to get container status \"f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192\": rpc error: code = NotFound desc = could not find container \"f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192\": container with ID starting with f78f3d2110d673eb97ebfe50d7fcac8e3b6e388b3fd454b3d092ede74def3192 not found: ID does not exist" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.104926 4779 scope.go:117] "RemoveContainer" containerID="09c89c8f8c7941359b61ddcc5ec2c0027b26c01929d6bd19eb98610442a03859" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.119164 4779 scope.go:117] "RemoveContainer" containerID="dda5b30a1b7f0a619784bfb2e1806fa50d9acb5c770d84639121af48eea1bbdf" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.148047 4779 scope.go:117] "RemoveContainer" containerID="931731ba6ed01e26f417d2ac11ed74d44354f8324a11f75d03ba06dce945a09b" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.173344 4779 scope.go:117] "RemoveContainer" containerID="f67dc6e02e56360246078e64b4462b41e541fb91bf0297cdf4d4ba7c5cbbf161" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.184020 4779 scope.go:117] "RemoveContainer" containerID="39ede4e4c5862e2cd536fd342922353eda934e4c96437a743b1355033a10bb2b" Sep 29 09:32:47 crc kubenswrapper[4779]: I0929 09:32:47.203897 4779 scope.go:117] "RemoveContainer" containerID="b67dea8ecaceff8f7064c698fac897a646ef27129468e785dd84786b25915ff8" Sep 29 
09:32:48 crc kubenswrapper[4779]: I0929 09:32:48.720302 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" path="/var/lib/kubelet/pods/06a364e4-0ea6-4fe1-bc83-7c98333e2163/volumes" Sep 29 09:32:48 crc kubenswrapper[4779]: I0929 09:32:48.721499 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" path="/var/lib/kubelet/pods/81762cd7-0154-40ce-858e-fb709553ea4d/volumes" Sep 29 09:32:48 crc kubenswrapper[4779]: I0929 09:32:48.722646 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" path="/var/lib/kubelet/pods/b32e508f-1532-471b-89cc-3af8e07b50a0/volumes" Sep 29 09:32:56 crc kubenswrapper[4779]: I0929 09:32:56.854679 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 09:33:06 crc kubenswrapper[4779]: I0929 09:33:06.808436 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" podUID="2118e02f-c7d4-4def-a9f1-ee9d81b6408f" containerName="oauth-openshift" containerID="cri-o://162576b523ea64489c5927c3c0410b157b078d22c54f230a915704aeaabf49e5" gracePeriod=15 Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.131651 4779 generic.go:334] "Generic (PLEG): container finished" podID="2118e02f-c7d4-4def-a9f1-ee9d81b6408f" containerID="162576b523ea64489c5927c3c0410b157b078d22c54f230a915704aeaabf49e5" exitCode=0 Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.131716 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" event={"ID":"2118e02f-c7d4-4def-a9f1-ee9d81b6408f","Type":"ContainerDied","Data":"162576b523ea64489c5927c3c0410b157b078d22c54f230a915704aeaabf49e5"} Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.220114 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.253643 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6d4bd77db6-24v76"] Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.253879 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerName="extract-utilities" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.253896 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerName="extract-utilities" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.253950 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" containerName="extract-utilities" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.253958 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" containerName="extract-utilities" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.253968 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerName="extract-utilities" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.253976 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerName="extract-utilities" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.253983 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerName="extract-content" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.253990 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerName="extract-content" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254011 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerName="extract-content" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254019 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerName="extract-content" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254029 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254036 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254047 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerName="extract-utilities" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254057 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerName="extract-utilities" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254068 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2118e02f-c7d4-4def-a9f1-ee9d81b6408f" containerName="oauth-openshift" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254075 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2118e02f-c7d4-4def-a9f1-ee9d81b6408f" containerName="oauth-openshift" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254087 4779 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="c68ad83b-d47d-4584-8d9a-b6702d4219e4" containerName="pruner" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254095 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c68ad83b-d47d-4584-8d9a-b6702d4219e4" containerName="pruner" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254107 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerName="extract-content" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254115 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerName="extract-content" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254122 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" containerName="extract-content" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254129 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" containerName="extract-content" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254138 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254144 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254155 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254162 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: E0929 09:33:07.254174 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254181 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254289 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2118e02f-c7d4-4def-a9f1-ee9d81b6408f" containerName="oauth-openshift" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254302 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="57fa213e-7e5b-451b-a6b1-d03c20a64e0f" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254313 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c68ad83b-d47d-4584-8d9a-b6702d4219e4" containerName="pruner" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254323 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a364e4-0ea6-4fe1-bc83-7c98333e2163" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254336 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b32e508f-1532-471b-89cc-3af8e07b50a0" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254346 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="81762cd7-0154-40ce-858e-fb709553ea4d" containerName="registry-server" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.254760 4779 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.274766 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6d4bd77db6-24v76"] Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420242 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-serving-cert\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420335 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp5tn\" (UniqueName: \"kubernetes.io/projected/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-kube-api-access-rp5tn\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420412 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-router-certs\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420458 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-session\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420513 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-dir\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420582 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-provider-selection\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420622 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-cliconfig\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420650 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420677 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-error\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420708 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-policies\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420742 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-service-ca\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420779 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-login\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420825 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-trusted-ca-bundle\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420861 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-idp-0-file-data\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.420954 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-ocp-branding-template\") pod \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\" (UID: \"2118e02f-c7d4-4def-a9f1-ee9d81b6408f\") " Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421417 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421693 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421731 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-audit-policies\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421750 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-session\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421784 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421814 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-router-certs\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421838 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-error\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421869 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-login\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421919 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zppxr\" (UniqueName: 
\"kubernetes.io/projected/0db91151-2b49-4835-89ac-4f093a0a7c11-kube-api-access-zppxr\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.421964 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-service-ca\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.422352 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.422469 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.422500 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.422636 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.422753 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.422533 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.422868 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.423029 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0db91151-2b49-4835-89ac-4f093a0a7c11-audit-dir\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.423176 4779 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.423201 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.423220 4779 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.423237 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.423256 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.425997 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.426028 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.426322 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.426719 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-kube-api-access-rp5tn" (OuterVolumeSpecName: "kube-api-access-rp5tn") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "kube-api-access-rp5tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.427581 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.428251 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.428456 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.429004 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.434049 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "2118e02f-c7d4-4def-a9f1-ee9d81b6408f" (UID: "2118e02f-c7d4-4def-a9f1-ee9d81b6408f"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.524735 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.524826 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-audit-policies\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.524874 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-session\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.524959 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.524993 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-router-certs\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525055 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-error\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525106 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-login\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525131 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zppxr\" (UniqueName: \"kubernetes.io/projected/0db91151-2b49-4835-89ac-4f093a0a7c11-kube-api-access-zppxr\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " 
pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525196 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-service-ca\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525279 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525346 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525387 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525423 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0db91151-2b49-4835-89ac-4f093a0a7c11-audit-dir\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525470 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525486 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp5tn\" (UniqueName: \"kubernetes.io/projected/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-kube-api-access-rp5tn\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525501 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-router-certs\") 
on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525515 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525529 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525544 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525556 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525571 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525585 4779 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2118e02f-c7d4-4def-a9f1-ee9d81b6408f-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.525639 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0db91151-2b49-4835-89ac-4f093a0a7c11-audit-dir\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.526445 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-service-ca\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.527566 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.527869 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " 
pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.528788 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-error\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.529040 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0db91151-2b49-4835-89ac-4f093a0a7c11-audit-policies\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.529052 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.529263 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-session\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.530139 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.530387 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.532238 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-user-template-login\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.533098 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 
29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.533867 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0db91151-2b49-4835-89ac-4f093a0a7c11-v4-0-config-system-router-certs\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.546424 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zppxr\" (UniqueName: \"kubernetes.io/projected/0db91151-2b49-4835-89ac-4f093a0a7c11-kube-api-access-zppxr\") pod \"oauth-openshift-6d4bd77db6-24v76\" (UID: \"0db91151-2b49-4835-89ac-4f093a0a7c11\") " pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:07 crc kubenswrapper[4779]: I0929 09:33:07.579242 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:08 crc kubenswrapper[4779]: I0929 09:33:08.008256 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6d4bd77db6-24v76"] Sep 29 09:33:08 crc kubenswrapper[4779]: W0929 09:33:08.019022 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0db91151_2b49_4835_89ac_4f093a0a7c11.slice/crio-7ad66089f1ad920cec49e322d947def5bf12c64c31c478c480ce7e60d940b38b WatchSource:0}: Error finding container 7ad66089f1ad920cec49e322d947def5bf12c64c31c478c480ce7e60d940b38b: Status 404 returned error can't find the container with id 7ad66089f1ad920cec49e322d947def5bf12c64c31c478c480ce7e60d940b38b Sep 29 09:33:08 crc kubenswrapper[4779]: I0929 09:33:08.138350 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" event={"ID":"0db91151-2b49-4835-89ac-4f093a0a7c11","Type":"ContainerStarted","Data":"7ad66089f1ad920cec49e322d947def5bf12c64c31c478c480ce7e60d940b38b"} Sep 29 09:33:08 crc kubenswrapper[4779]: I0929 09:33:08.141361 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" event={"ID":"2118e02f-c7d4-4def-a9f1-ee9d81b6408f","Type":"ContainerDied","Data":"9db60f672ded3dac2983ad0c03018c35bd284956c22158a1dee2ab7e47197f94"} Sep 29 09:33:08 crc kubenswrapper[4779]: I0929 09:33:08.141399 4779 scope.go:117] "RemoveContainer" containerID="162576b523ea64489c5927c3c0410b157b078d22c54f230a915704aeaabf49e5" Sep 29 09:33:08 crc kubenswrapper[4779]: I0929 09:33:08.141543 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dvvh4" Sep 29 09:33:08 crc kubenswrapper[4779]: I0929 09:33:08.185501 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dvvh4"] Sep 29 09:33:08 crc kubenswrapper[4779]: I0929 09:33:08.193265 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dvvh4"] Sep 29 09:33:08 crc kubenswrapper[4779]: I0929 09:33:08.721197 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2118e02f-c7d4-4def-a9f1-ee9d81b6408f" path="/var/lib/kubelet/pods/2118e02f-c7d4-4def-a9f1-ee9d81b6408f/volumes" Sep 29 09:33:09 crc kubenswrapper[4779]: I0929 09:33:09.150100 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" event={"ID":"0db91151-2b49-4835-89ac-4f093a0a7c11","Type":"ContainerStarted","Data":"db74d926dbbcdebd6ecf02eda1e8bd3d5fa5d13b863171e35f59482b752a1638"} Sep 29 09:33:09 crc kubenswrapper[4779]: I0929 09:33:09.150819 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:09 crc kubenswrapper[4779]: I0929 09:33:09.155983 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" Sep 29 09:33:09 crc kubenswrapper[4779]: I0929 09:33:09.175784 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6d4bd77db6-24v76" podStartSLOduration=28.175765718 podStartE2EDuration="28.175765718s" podCreationTimestamp="2025-09-29 09:32:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:33:09.1712627 +0000 UTC m=+221.152586604" watchObservedRunningTime="2025-09-29 09:33:09.175765718 +0000 UTC m=+221.157089632" Sep 29 09:33:16 crc kubenswrapper[4779]: I0929 09:33:16.966407 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:33:16 crc kubenswrapper[4779]: I0929 09:33:16.967114 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:33:16 crc kubenswrapper[4779]: I0929 09:33:16.967177 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:33:16 crc kubenswrapper[4779]: I0929 09:33:16.968072 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 09:33:16 crc kubenswrapper[4779]: I0929 09:33:16.968175 4779 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f" gracePeriod=600 Sep 29 09:33:17 crc kubenswrapper[4779]: I0929 09:33:17.199265 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f" exitCode=0 Sep 29 09:33:17 crc kubenswrapper[4779]: I0929 09:33:17.199441 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f"} Sep 29 09:33:18 crc kubenswrapper[4779]: I0929 09:33:18.207302 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"fce338249e8d781ebcc8dd4226aa44e91a894c7151c8ca6d0b4f7848ae00e827"} Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.149370 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z68v6"] Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.150555 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z68v6" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerName="registry-server" containerID="cri-o://a1ed42357c550a1f773b7b75502324a32c8a15da49a6d36bf7c4984def499011" gracePeriod=30 Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.163745 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rmf6p"] Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.164012 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rmf6p" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerName="registry-server" containerID="cri-o://5e92500e82c512735102892bc85994c566b09dcd679c4d69f13b8a72df84371f" gracePeriod=30 Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.179293 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dkmtc"] Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.180038 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" podUID="cb9c789a-4f39-46ee-8e61-5e39d65daf38" containerName="marketplace-operator" containerID="cri-o://cd8dca10d6a88af03c12fe6a564136dce54a84bd63370b28324793527bf20679" gracePeriod=30 Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.185997 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2rck"] Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.186198 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d2rck" podUID="a21410c1-e475-491e-9298-2bbba632e116" containerName="registry-server" containerID="cri-o://ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da" gracePeriod=30 Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.196003 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-8pkd5"] Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.196201 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8pkd5" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="registry-server" containerID="cri-o://ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c" gracePeriod=30 Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.205072 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jtxkm"] Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.205652 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.214436 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jtxkm"] Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.282820 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c886bf87-12c2-4c35-80de-3bbf58c0df66-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.282985 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c886bf87-12c2-4c35-80de-3bbf58c0df66-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.283033 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5flkl\" (UniqueName: \"kubernetes.io/projected/c886bf87-12c2-4c35-80de-3bbf58c0df66-kube-api-access-5flkl\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.312152 4779 generic.go:334] "Generic (PLEG): container finished" podID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerID="5e92500e82c512735102892bc85994c566b09dcd679c4d69f13b8a72df84371f" exitCode=0 Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.312227 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmf6p" event={"ID":"7db7e11b-22e3-45bf-a365-0a3583e0d52d","Type":"ContainerDied","Data":"5e92500e82c512735102892bc85994c566b09dcd679c4d69f13b8a72df84371f"} Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.313788 4779 generic.go:334] "Generic (PLEG): container finished" podID="cb9c789a-4f39-46ee-8e61-5e39d65daf38" containerID="cd8dca10d6a88af03c12fe6a564136dce54a84bd63370b28324793527bf20679" exitCode=0 Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.313851 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" event={"ID":"cb9c789a-4f39-46ee-8e61-5e39d65daf38","Type":"ContainerDied","Data":"cd8dca10d6a88af03c12fe6a564136dce54a84bd63370b28324793527bf20679"} Sep 29 09:33:35 crc 
kubenswrapper[4779]: I0929 09:33:35.317392 4779 generic.go:334] "Generic (PLEG): container finished" podID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerID="a1ed42357c550a1f773b7b75502324a32c8a15da49a6d36bf7c4984def499011" exitCode=0 Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.317428 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z68v6" event={"ID":"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a","Type":"ContainerDied","Data":"a1ed42357c550a1f773b7b75502324a32c8a15da49a6d36bf7c4984def499011"} Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.384499 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5flkl\" (UniqueName: \"kubernetes.io/projected/c886bf87-12c2-4c35-80de-3bbf58c0df66-kube-api-access-5flkl\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.384572 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c886bf87-12c2-4c35-80de-3bbf58c0df66-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.384639 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c886bf87-12c2-4c35-80de-3bbf58c0df66-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.386047 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c886bf87-12c2-4c35-80de-3bbf58c0df66-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.390980 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c886bf87-12c2-4c35-80de-3bbf58c0df66-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.403407 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5flkl\" (UniqueName: \"kubernetes.io/projected/c886bf87-12c2-4c35-80de-3bbf58c0df66-kube-api-access-5flkl\") pod \"marketplace-operator-79b997595-jtxkm\" (UID: \"c886bf87-12c2-4c35-80de-3bbf58c0df66\") " pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.570390 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.573405 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.580494 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.586098 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.601586 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.606221 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690348 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdnpg\" (UniqueName: \"kubernetes.io/projected/a21410c1-e475-491e-9298-2bbba632e116-kube-api-access-xdnpg\") pod \"a21410c1-e475-491e-9298-2bbba632e116\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690394 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-utilities\") pod \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690416 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-operator-metrics\") pod \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690445 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-catalog-content\") pod \"e2a15850-8417-40bc-8d7a-55ad976e0e47\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690488 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-catalog-content\") pod \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690513 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8p7w2\" (UniqueName: \"kubernetes.io/projected/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-kube-api-access-8p7w2\") pod \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690546 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsj7h\" (UniqueName: \"kubernetes.io/projected/e2a15850-8417-40bc-8d7a-55ad976e0e47-kube-api-access-gsj7h\") pod \"e2a15850-8417-40bc-8d7a-55ad976e0e47\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690570 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-utilities\") pod \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690589 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p6jj\" (UniqueName: \"kubernetes.io/projected/7db7e11b-22e3-45bf-a365-0a3583e0d52d-kube-api-access-2p6jj\") pod \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\" (UID: \"7db7e11b-22e3-45bf-a365-0a3583e0d52d\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690615 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-trusted-ca\") pod \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690641 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-utilities\") pod \"e2a15850-8417-40bc-8d7a-55ad976e0e47\" (UID: \"e2a15850-8417-40bc-8d7a-55ad976e0e47\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690661 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-catalog-content\") pod \"a21410c1-e475-491e-9298-2bbba632e116\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690679 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-catalog-content\") pod \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\" (UID: \"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690700 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-utilities\") pod \"a21410c1-e475-491e-9298-2bbba632e116\" (UID: \"a21410c1-e475-491e-9298-2bbba632e116\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.690717 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv498\" (UniqueName: \"kubernetes.io/projected/cb9c789a-4f39-46ee-8e61-5e39d65daf38-kube-api-access-fv498\") pod \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\" (UID: \"cb9c789a-4f39-46ee-8e61-5e39d65daf38\") " Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.692593 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-utilities" (OuterVolumeSpecName: "utilities") pod "e2a15850-8417-40bc-8d7a-55ad976e0e47" (UID: "e2a15850-8417-40bc-8d7a-55ad976e0e47"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.694368 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-utilities" (OuterVolumeSpecName: "utilities") pod "d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" (UID: "d480d7e4-9bc7-40ed-ab03-d091d67a7a9a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.694481 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "cb9c789a-4f39-46ee-8e61-5e39d65daf38" (UID: "cb9c789a-4f39-46ee-8e61-5e39d65daf38"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.696045 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a15850-8417-40bc-8d7a-55ad976e0e47-kube-api-access-gsj7h" (OuterVolumeSpecName: "kube-api-access-gsj7h") pod "e2a15850-8417-40bc-8d7a-55ad976e0e47" (UID: "e2a15850-8417-40bc-8d7a-55ad976e0e47"). InnerVolumeSpecName "kube-api-access-gsj7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.696266 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-utilities" (OuterVolumeSpecName: "utilities") pod "7db7e11b-22e3-45bf-a365-0a3583e0d52d" (UID: "7db7e11b-22e3-45bf-a365-0a3583e0d52d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.696753 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "cb9c789a-4f39-46ee-8e61-5e39d65daf38" (UID: "cb9c789a-4f39-46ee-8e61-5e39d65daf38"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.697118 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7db7e11b-22e3-45bf-a365-0a3583e0d52d-kube-api-access-2p6jj" (OuterVolumeSpecName: "kube-api-access-2p6jj") pod "7db7e11b-22e3-45bf-a365-0a3583e0d52d" (UID: "7db7e11b-22e3-45bf-a365-0a3583e0d52d"). InnerVolumeSpecName "kube-api-access-2p6jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.697732 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-utilities" (OuterVolumeSpecName: "utilities") pod "a21410c1-e475-491e-9298-2bbba632e116" (UID: "a21410c1-e475-491e-9298-2bbba632e116"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.699525 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-kube-api-access-8p7w2" (OuterVolumeSpecName: "kube-api-access-8p7w2") pod "d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" (UID: "d480d7e4-9bc7-40ed-ab03-d091d67a7a9a"). InnerVolumeSpecName "kube-api-access-8p7w2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.700764 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb9c789a-4f39-46ee-8e61-5e39d65daf38-kube-api-access-fv498" (OuterVolumeSpecName: "kube-api-access-fv498") pod "cb9c789a-4f39-46ee-8e61-5e39d65daf38" (UID: "cb9c789a-4f39-46ee-8e61-5e39d65daf38"). InnerVolumeSpecName "kube-api-access-fv498". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.699243 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a21410c1-e475-491e-9298-2bbba632e116-kube-api-access-xdnpg" (OuterVolumeSpecName: "kube-api-access-xdnpg") pod "a21410c1-e475-491e-9298-2bbba632e116" (UID: "a21410c1-e475-491e-9298-2bbba632e116"). InnerVolumeSpecName "kube-api-access-xdnpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.745917 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a21410c1-e475-491e-9298-2bbba632e116" (UID: "a21410c1-e475-491e-9298-2bbba632e116"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.756431 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7db7e11b-22e3-45bf-a365-0a3583e0d52d" (UID: "7db7e11b-22e3-45bf-a365-0a3583e0d52d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.782078 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" (UID: "d480d7e4-9bc7-40ed-ab03-d091d67a7a9a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.786664 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jtxkm"] Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.792859 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.792976 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p6jj\" (UniqueName: \"kubernetes.io/projected/7db7e11b-22e3-45bf-a365-0a3583e0d52d-kube-api-access-2p6jj\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.792994 4779 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793004 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793016 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793084 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793096 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21410c1-e475-491e-9298-2bbba632e116-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793107 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv498\" (UniqueName: \"kubernetes.io/projected/cb9c789a-4f39-46ee-8e61-5e39d65daf38-kube-api-access-fv498\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793118 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdnpg\" (UniqueName: \"kubernetes.io/projected/a21410c1-e475-491e-9298-2bbba632e116-kube-api-access-xdnpg\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793128 4779 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cb9c789a-4f39-46ee-8e61-5e39d65daf38-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793139 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793149 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db7e11b-22e3-45bf-a365-0a3583e0d52d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 
09:33:35.793162 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8p7w2\" (UniqueName: \"kubernetes.io/projected/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a-kube-api-access-8p7w2\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.793174 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsj7h\" (UniqueName: \"kubernetes.io/projected/e2a15850-8417-40bc-8d7a-55ad976e0e47-kube-api-access-gsj7h\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.819653 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2a15850-8417-40bc-8d7a-55ad976e0e47" (UID: "e2a15850-8417-40bc-8d7a-55ad976e0e47"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:33:35 crc kubenswrapper[4779]: I0929 09:33:35.894638 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a15850-8417-40bc-8d7a-55ad976e0e47-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.338715 4779 generic.go:334] "Generic (PLEG): container finished" podID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerID="ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c" exitCode=0 Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.339138 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8pkd5" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.342666 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pkd5" event={"ID":"e2a15850-8417-40bc-8d7a-55ad976e0e47","Type":"ContainerDied","Data":"ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.342702 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pkd5" event={"ID":"e2a15850-8417-40bc-8d7a-55ad976e0e47","Type":"ContainerDied","Data":"92e657e2bc7b831fb7d7e284c16d60c408bddac67d62de87db50b5d00deece26"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.342718 4779 scope.go:117] "RemoveContainer" containerID="ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.344061 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" event={"ID":"c886bf87-12c2-4c35-80de-3bbf58c0df66","Type":"ContainerStarted","Data":"3b574ca7fa3204a14deae167cd9a62c95a5c7038cd8e237b5b033cc1b6dd56bb"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.344083 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" event={"ID":"c886bf87-12c2-4c35-80de-3bbf58c0df66","Type":"ContainerStarted","Data":"89fe42381924f9f7f414f9dfda3c7926b72638dc23f16438b1bf6e344ba6283f"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.345048 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.350693 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.351166 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmf6p" event={"ID":"7db7e11b-22e3-45bf-a365-0a3583e0d52d","Type":"ContainerDied","Data":"80c3c2f519eaba2a5a4e5a169a68258d358c961e78eb5aa6a5a9a66c5ad7a4ae"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.351248 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rmf6p" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.353679 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" event={"ID":"cb9c789a-4f39-46ee-8e61-5e39d65daf38","Type":"ContainerDied","Data":"fd1c20a726a5ad6aafc038bde020c7bcf11cd8cb9bca1a6e277cfe6fcd095969"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.353778 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dkmtc" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.358896 4779 scope.go:117] "RemoveContainer" containerID="b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.362627 4779 generic.go:334] "Generic (PLEG): container finished" podID="a21410c1-e475-491e-9298-2bbba632e116" containerID="ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da" exitCode=0 Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.362693 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2rck" event={"ID":"a21410c1-e475-491e-9298-2bbba632e116","Type":"ContainerDied","Data":"ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.362697 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-jtxkm" podStartSLOduration=1.362681426 podStartE2EDuration="1.362681426s" podCreationTimestamp="2025-09-29 09:33:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:33:36.35919393 +0000 UTC m=+248.340517844" watchObservedRunningTime="2025-09-29 09:33:36.362681426 +0000 UTC m=+248.344005330" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.362721 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2rck" event={"ID":"a21410c1-e475-491e-9298-2bbba632e116","Type":"ContainerDied","Data":"560463f99775811d5c6f8404a886d1c9b7d2f33812d319117d8e48c8b4f29312"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.362824 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2rck" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.366190 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z68v6" event={"ID":"d480d7e4-9bc7-40ed-ab03-d091d67a7a9a","Type":"ContainerDied","Data":"411d96155f06c5750fe57d49c007396a0a9fef4975583b0f7a994cba0d8523ab"} Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.366223 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z68v6" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.376094 4779 scope.go:117] "RemoveContainer" containerID="13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.442223 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8pkd5"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.444957 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8pkd5"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.448687 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rmf6p"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.451788 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rmf6p"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.453491 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z68v6"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.455722 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z68v6"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.456031 4779 scope.go:117] "RemoveContainer" containerID="ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c" Sep 29 09:33:36 crc kubenswrapper[4779]: E0929 09:33:36.456474 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c\": container with ID starting with ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c not found: ID does not exist" containerID="ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.456509 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c"} err="failed to get container status \"ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c\": rpc error: code = NotFound desc = could not find container \"ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c\": container with ID starting with ec765e95f83be37c1e1c12a891a560b6397519cf394c96396f8bab65f19d253c not found: ID does not exist" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.456533 4779 scope.go:117] "RemoveContainer" containerID="b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295" Sep 29 09:33:36 crc kubenswrapper[4779]: E0929 09:33:36.456811 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295\": container with ID starting with b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295 not found: ID does not exist" containerID="b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.456843 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295"} err="failed to get container status \"b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295\": rpc error: code = NotFound desc = could not 
find container \"b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295\": container with ID starting with b41e530e26fbef13431ca6b71c9e061de9db54d3aa9b571fc42585143f5d5295 not found: ID does not exist" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.456865 4779 scope.go:117] "RemoveContainer" containerID="13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a" Sep 29 09:33:36 crc kubenswrapper[4779]: E0929 09:33:36.457193 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a\": container with ID starting with 13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a not found: ID does not exist" containerID="13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.457222 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a"} err="failed to get container status \"13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a\": rpc error: code = NotFound desc = could not find container \"13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a\": container with ID starting with 13d9d0ed22e9d20f11e95b6540607ca89b16297736dc274f7beacfef42931b4a not found: ID does not exist" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.457242 4779 scope.go:117] "RemoveContainer" containerID="5e92500e82c512735102892bc85994c566b09dcd679c4d69f13b8a72df84371f" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.458529 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dkmtc"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.460903 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dkmtc"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.463315 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2rck"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.466159 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2rck"] Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.468615 4779 scope.go:117] "RemoveContainer" containerID="6f0c020b15aa3dd056bf86fb132eb7d348d0f83a55d3a9432221c89174ff24c2" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.488284 4779 scope.go:117] "RemoveContainer" containerID="11e22128abf4144e6386a7f1e658a211669cc8409eba0c55b12cbeb95458c873" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.502173 4779 scope.go:117] "RemoveContainer" containerID="cd8dca10d6a88af03c12fe6a564136dce54a84bd63370b28324793527bf20679" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.541134 4779 scope.go:117] "RemoveContainer" containerID="ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.575321 4779 scope.go:117] "RemoveContainer" containerID="6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.609618 4779 scope.go:117] "RemoveContainer" containerID="9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.627848 4779 scope.go:117] "RemoveContainer" 
containerID="ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da" Sep 29 09:33:36 crc kubenswrapper[4779]: E0929 09:33:36.628346 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da\": container with ID starting with ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da not found: ID does not exist" containerID="ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.628385 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da"} err="failed to get container status \"ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da\": rpc error: code = NotFound desc = could not find container \"ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da\": container with ID starting with ca564e6c76ad08b6bb1f8b117f35b3f718a613c4cb4d3d8d447c4a352d8a49da not found: ID does not exist" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.628412 4779 scope.go:117] "RemoveContainer" containerID="6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a" Sep 29 09:33:36 crc kubenswrapper[4779]: E0929 09:33:36.628792 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a\": container with ID starting with 6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a not found: ID does not exist" containerID="6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.628814 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a"} err="failed to get container status \"6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a\": rpc error: code = NotFound desc = could not find container \"6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a\": container with ID starting with 6b671894e65af467d354cac02d14d4b408c9ff506710b63e3812b0294fd8194a not found: ID does not exist" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.628830 4779 scope.go:117] "RemoveContainer" containerID="9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56" Sep 29 09:33:36 crc kubenswrapper[4779]: E0929 09:33:36.629173 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56\": container with ID starting with 9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56 not found: ID does not exist" containerID="9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56" Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.629218 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56"} err="failed to get container status \"9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56\": rpc error: code = NotFound desc = could not find container \"9a06e3c492308c612907c34f3531763addff3f4b78c348d823f78a683e863a56\": container with ID starting with 
Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.629237 4779 scope.go:117] "RemoveContainer" containerID="a1ed42357c550a1f773b7b75502324a32c8a15da49a6d36bf7c4984def499011"
Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.639293 4779 scope.go:117] "RemoveContainer" containerID="91cc7b454fe6dddf4e4eb073fe4aa38eef8d68a13022deee2df7c3921107d8ce"
Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.649841 4779 scope.go:117] "RemoveContainer" containerID="057d182fb75de135503a3cd812d8215c9cfd3568a159737254e7b424b4d83301"
Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.720947 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" path="/var/lib/kubelet/pods/7db7e11b-22e3-45bf-a365-0a3583e0d52d/volumes"
Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.721533 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a21410c1-e475-491e-9298-2bbba632e116" path="/var/lib/kubelet/pods/a21410c1-e475-491e-9298-2bbba632e116/volumes"
Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.722155 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb9c789a-4f39-46ee-8e61-5e39d65daf38" path="/var/lib/kubelet/pods/cb9c789a-4f39-46ee-8e61-5e39d65daf38/volumes"
Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.723034 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" path="/var/lib/kubelet/pods/d480d7e4-9bc7-40ed-ab03-d091d67a7a9a/volumes"
Sep 29 09:33:36 crc kubenswrapper[4779]: I0929 09:33:36.723679 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" path="/var/lib/kubelet/pods/e2a15850-8417-40bc-8d7a-55ad976e0e47/volumes"
Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.369720 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jsh2v"]
Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.369947 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerName="extract-utilities"
Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.369962 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerName="extract-utilities"
Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.369972 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerName="registry-server"
Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.369982 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerName="registry-server"
Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.369996 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerName="extract-content"
Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370006 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerName="extract-content"
Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370016 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerName="extract-content"
Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370025 4779 state_mem.go:107] "Deleted CPUSet
assignment" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerName="extract-content" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370037 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370045 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370056 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb9c789a-4f39-46ee-8e61-5e39d65daf38" containerName="marketplace-operator" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370064 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb9c789a-4f39-46ee-8e61-5e39d65daf38" containerName="marketplace-operator" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370075 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a21410c1-e475-491e-9298-2bbba632e116" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370083 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a21410c1-e475-491e-9298-2bbba632e116" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370095 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerName="extract-utilities" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370134 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerName="extract-utilities" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370147 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370155 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370170 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a21410c1-e475-491e-9298-2bbba632e116" containerName="extract-content" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370179 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a21410c1-e475-491e-9298-2bbba632e116" containerName="extract-content" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370193 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="extract-content" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370201 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="extract-content" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370214 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a21410c1-e475-491e-9298-2bbba632e116" containerName="extract-utilities" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370222 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a21410c1-e475-491e-9298-2bbba632e116" containerName="extract-utilities" Sep 29 09:33:37 crc kubenswrapper[4779]: E0929 09:33:37.370234 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="extract-utilities" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370242 
4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="extract-utilities" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370361 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a15850-8417-40bc-8d7a-55ad976e0e47" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370375 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db7e11b-22e3-45bf-a365-0a3583e0d52d" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370388 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb9c789a-4f39-46ee-8e61-5e39d65daf38" containerName="marketplace-operator" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370400 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a21410c1-e475-491e-9298-2bbba632e116" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.370410 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d480d7e4-9bc7-40ed-ab03-d091d67a7a9a" containerName="registry-server" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.374423 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.376637 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.378857 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jsh2v"] Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.514828 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgzdb\" (UniqueName: \"kubernetes.io/projected/ec71db0f-c295-4583-8314-de4043a1ccdf-kube-api-access-fgzdb\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.514874 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec71db0f-c295-4583-8314-de4043a1ccdf-utilities\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.514943 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec71db0f-c295-4583-8314-de4043a1ccdf-catalog-content\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.571100 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q5kll"] Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.575743 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.578847 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.595515 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q5kll"] Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.629293 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgzdb\" (UniqueName: \"kubernetes.io/projected/ec71db0f-c295-4583-8314-de4043a1ccdf-kube-api-access-fgzdb\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.629424 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec71db0f-c295-4583-8314-de4043a1ccdf-utilities\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.629542 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec71db0f-c295-4583-8314-de4043a1ccdf-catalog-content\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.630281 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec71db0f-c295-4583-8314-de4043a1ccdf-utilities\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.630362 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec71db0f-c295-4583-8314-de4043a1ccdf-catalog-content\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.653351 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgzdb\" (UniqueName: \"kubernetes.io/projected/ec71db0f-c295-4583-8314-de4043a1ccdf-kube-api-access-fgzdb\") pod \"certified-operators-jsh2v\" (UID: \"ec71db0f-c295-4583-8314-de4043a1ccdf\") " pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.698178 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.731211 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1e9d751-a654-48a0-b585-6865e294ce93-catalog-content\") pod \"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.731295 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmjkw\" (UniqueName: \"kubernetes.io/projected/b1e9d751-a654-48a0-b585-6865e294ce93-kube-api-access-rmjkw\") pod \"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.731335 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1e9d751-a654-48a0-b585-6865e294ce93-utilities\") pod \"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.835507 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmjkw\" (UniqueName: \"kubernetes.io/projected/b1e9d751-a654-48a0-b585-6865e294ce93-kube-api-access-rmjkw\") pod \"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.835838 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1e9d751-a654-48a0-b585-6865e294ce93-utilities\") pod \"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.835963 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1e9d751-a654-48a0-b585-6865e294ce93-catalog-content\") pod \"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.836611 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1e9d751-a654-48a0-b585-6865e294ce93-utilities\") pod \"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.836733 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1e9d751-a654-48a0-b585-6865e294ce93-catalog-content\") pod \"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.854281 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmjkw\" (UniqueName: \"kubernetes.io/projected/b1e9d751-a654-48a0-b585-6865e294ce93-kube-api-access-rmjkw\") pod 
\"community-operators-q5kll\" (UID: \"b1e9d751-a654-48a0-b585-6865e294ce93\") " pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.865761 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jsh2v"] Sep 29 09:33:37 crc kubenswrapper[4779]: W0929 09:33:37.873118 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec71db0f_c295_4583_8314_de4043a1ccdf.slice/crio-2258d5df1a9bb87819ca457a3cbf009358ecd8b3033080f402e960b6d7bca28d WatchSource:0}: Error finding container 2258d5df1a9bb87819ca457a3cbf009358ecd8b3033080f402e960b6d7bca28d: Status 404 returned error can't find the container with id 2258d5df1a9bb87819ca457a3cbf009358ecd8b3033080f402e960b6d7bca28d Sep 29 09:33:37 crc kubenswrapper[4779]: I0929 09:33:37.939788 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:38 crc kubenswrapper[4779]: I0929 09:33:38.118649 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q5kll"] Sep 29 09:33:38 crc kubenswrapper[4779]: I0929 09:33:38.397941 4779 generic.go:334] "Generic (PLEG): container finished" podID="ec71db0f-c295-4583-8314-de4043a1ccdf" containerID="bb511c8262e28e24d6f4c9fe7039df4d66adc523e7ca5aaafa1e77f92e336c49" exitCode=0 Sep 29 09:33:38 crc kubenswrapper[4779]: I0929 09:33:38.398023 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsh2v" event={"ID":"ec71db0f-c295-4583-8314-de4043a1ccdf","Type":"ContainerDied","Data":"bb511c8262e28e24d6f4c9fe7039df4d66adc523e7ca5aaafa1e77f92e336c49"} Sep 29 09:33:38 crc kubenswrapper[4779]: I0929 09:33:38.398050 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsh2v" event={"ID":"ec71db0f-c295-4583-8314-de4043a1ccdf","Type":"ContainerStarted","Data":"2258d5df1a9bb87819ca457a3cbf009358ecd8b3033080f402e960b6d7bca28d"} Sep 29 09:33:38 crc kubenswrapper[4779]: I0929 09:33:38.401935 4779 generic.go:334] "Generic (PLEG): container finished" podID="b1e9d751-a654-48a0-b585-6865e294ce93" containerID="2c8eacb26d4a9958aa56e858ce7a77b031b4f16de4b758d55cebc741a47675e6" exitCode=0 Sep 29 09:33:38 crc kubenswrapper[4779]: I0929 09:33:38.402261 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5kll" event={"ID":"b1e9d751-a654-48a0-b585-6865e294ce93","Type":"ContainerDied","Data":"2c8eacb26d4a9958aa56e858ce7a77b031b4f16de4b758d55cebc741a47675e6"} Sep 29 09:33:38 crc kubenswrapper[4779]: I0929 09:33:38.402297 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5kll" event={"ID":"b1e9d751-a654-48a0-b585-6865e294ce93","Type":"ContainerStarted","Data":"a806257217e9689b97f961ee71ef11d7c651e380eb8e779be54534a229956189"} Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.407855 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5kll" event={"ID":"b1e9d751-a654-48a0-b585-6865e294ce93","Type":"ContainerStarted","Data":"6aea5995e4effbbb55637ed06f33cad77753d2bfe467bcbbb385c9a6d31fe268"} Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.768237 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5rpgs"] Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 
09:33:39.769534 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.773787 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.779971 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5rpgs"] Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.871695 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4w2r\" (UniqueName: \"kubernetes.io/projected/d65a114a-c403-4e47-9496-eea1f6b50f8a-kube-api-access-d4w2r\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.871787 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d65a114a-c403-4e47-9496-eea1f6b50f8a-utilities\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.871958 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d65a114a-c403-4e47-9496-eea1f6b50f8a-catalog-content\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.968402 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zvkb9"] Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.969591 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.974045 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d65a114a-c403-4e47-9496-eea1f6b50f8a-utilities\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.974296 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d65a114a-c403-4e47-9496-eea1f6b50f8a-catalog-content\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.974371 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4w2r\" (UniqueName: \"kubernetes.io/projected/d65a114a-c403-4e47-9496-eea1f6b50f8a-kube-api-access-d4w2r\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.974693 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d65a114a-c403-4e47-9496-eea1f6b50f8a-utilities\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.975132 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.976219 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d65a114a-c403-4e47-9496-eea1f6b50f8a-catalog-content\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:39 crc kubenswrapper[4779]: I0929 09:33:39.979129 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zvkb9"] Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.005505 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4w2r\" (UniqueName: \"kubernetes.io/projected/d65a114a-c403-4e47-9496-eea1f6b50f8a-kube-api-access-d4w2r\") pod \"redhat-marketplace-5rpgs\" (UID: \"d65a114a-c403-4e47-9496-eea1f6b50f8a\") " pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.075213 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-utilities\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.075407 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-catalog-content\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " 
pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.075528 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl7kj\" (UniqueName: \"kubernetes.io/projected/9cab2663-d571-4988-b005-338943d811f5-kube-api-access-tl7kj\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.083840 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.177546 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-utilities\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.177845 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-catalog-content\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.177878 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl7kj\" (UniqueName: \"kubernetes.io/projected/9cab2663-d571-4988-b005-338943d811f5-kube-api-access-tl7kj\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.178381 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-utilities\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.178628 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-catalog-content\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.195517 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl7kj\" (UniqueName: \"kubernetes.io/projected/9cab2663-d571-4988-b005-338943d811f5-kube-api-access-tl7kj\") pod \"redhat-operators-zvkb9\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") " pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.281546 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5rpgs"] Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.287973 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.415834 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5rpgs" event={"ID":"d65a114a-c403-4e47-9496-eea1f6b50f8a","Type":"ContainerStarted","Data":"220fafc6b393f2db2123d3f2e0537a420bfa6d250b36f5549fd1bd7ed97c745d"} Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.416212 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5rpgs" event={"ID":"d65a114a-c403-4e47-9496-eea1f6b50f8a","Type":"ContainerStarted","Data":"cfc0275e3472d4b2da802f95b2de9475bb306b468504bd13c3cd67eaaa1b16f6"} Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.418195 4779 generic.go:334] "Generic (PLEG): container finished" podID="b1e9d751-a654-48a0-b585-6865e294ce93" containerID="6aea5995e4effbbb55637ed06f33cad77753d2bfe467bcbbb385c9a6d31fe268" exitCode=0 Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.418290 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5kll" event={"ID":"b1e9d751-a654-48a0-b585-6865e294ce93","Type":"ContainerDied","Data":"6aea5995e4effbbb55637ed06f33cad77753d2bfe467bcbbb385c9a6d31fe268"} Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.427732 4779 generic.go:334] "Generic (PLEG): container finished" podID="ec71db0f-c295-4583-8314-de4043a1ccdf" containerID="65cce4d112b8269d135369fe95dd0345ef801c240f8bfcaf9a1e5e59923de525" exitCode=0 Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.427773 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsh2v" event={"ID":"ec71db0f-c295-4583-8314-de4043a1ccdf","Type":"ContainerDied","Data":"65cce4d112b8269d135369fe95dd0345ef801c240f8bfcaf9a1e5e59923de525"} Sep 29 09:33:40 crc kubenswrapper[4779]: I0929 09:33:40.470964 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zvkb9"] Sep 29 09:33:40 crc kubenswrapper[4779]: W0929 09:33:40.512235 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cab2663_d571_4988_b005_338943d811f5.slice/crio-67329ad6e22fd767269d76229e7d926ac53d23969377811b3147768af3b77d6e WatchSource:0}: Error finding container 67329ad6e22fd767269d76229e7d926ac53d23969377811b3147768af3b77d6e: Status 404 returned error can't find the container with id 67329ad6e22fd767269d76229e7d926ac53d23969377811b3147768af3b77d6e Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.434421 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsh2v" event={"ID":"ec71db0f-c295-4583-8314-de4043a1ccdf","Type":"ContainerStarted","Data":"66cef5a180994a78a56439b4092089ca9d75310450d4fafc2f235d2395ffc2cb"} Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.435802 4779 generic.go:334] "Generic (PLEG): container finished" podID="9cab2663-d571-4988-b005-338943d811f5" containerID="21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77" exitCode=0 Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.436083 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zvkb9" event={"ID":"9cab2663-d571-4988-b005-338943d811f5","Type":"ContainerDied","Data":"21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77"} Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.436121 4779 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zvkb9" event={"ID":"9cab2663-d571-4988-b005-338943d811f5","Type":"ContainerStarted","Data":"67329ad6e22fd767269d76229e7d926ac53d23969377811b3147768af3b77d6e"} Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.438035 4779 generic.go:334] "Generic (PLEG): container finished" podID="d65a114a-c403-4e47-9496-eea1f6b50f8a" containerID="220fafc6b393f2db2123d3f2e0537a420bfa6d250b36f5549fd1bd7ed97c745d" exitCode=0 Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.438075 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5rpgs" event={"ID":"d65a114a-c403-4e47-9496-eea1f6b50f8a","Type":"ContainerDied","Data":"220fafc6b393f2db2123d3f2e0537a420bfa6d250b36f5549fd1bd7ed97c745d"} Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.442246 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5kll" event={"ID":"b1e9d751-a654-48a0-b585-6865e294ce93","Type":"ContainerStarted","Data":"c9218bb9b7980dd1222e81af68b85b8ac63839189c59db482535e7cf9b36628e"} Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.456466 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jsh2v" podStartSLOduration=1.926352501 podStartE2EDuration="4.456452778s" podCreationTimestamp="2025-09-29 09:33:37 +0000 UTC" firstStartedPulling="2025-09-29 09:33:38.399908113 +0000 UTC m=+250.381232017" lastFinishedPulling="2025-09-29 09:33:40.93000839 +0000 UTC m=+252.911332294" observedRunningTime="2025-09-29 09:33:41.45518892 +0000 UTC m=+253.436512824" watchObservedRunningTime="2025-09-29 09:33:41.456452778 +0000 UTC m=+253.437776682" Sep 29 09:33:41 crc kubenswrapper[4779]: I0929 09:33:41.499813 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q5kll" podStartSLOduration=2.040769481 podStartE2EDuration="4.499792154s" podCreationTimestamp="2025-09-29 09:33:37 +0000 UTC" firstStartedPulling="2025-09-29 09:33:38.404017819 +0000 UTC m=+250.385341723" lastFinishedPulling="2025-09-29 09:33:40.863040492 +0000 UTC m=+252.844364396" observedRunningTime="2025-09-29 09:33:41.475779999 +0000 UTC m=+253.457103913" watchObservedRunningTime="2025-09-29 09:33:41.499792154 +0000 UTC m=+253.481116058" Sep 29 09:33:43 crc kubenswrapper[4779]: I0929 09:33:43.455562 4779 generic.go:334] "Generic (PLEG): container finished" podID="9cab2663-d571-4988-b005-338943d811f5" containerID="287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a" exitCode=0 Sep 29 09:33:43 crc kubenswrapper[4779]: I0929 09:33:43.455624 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zvkb9" event={"ID":"9cab2663-d571-4988-b005-338943d811f5","Type":"ContainerDied","Data":"287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a"} Sep 29 09:33:44 crc kubenswrapper[4779]: I0929 09:33:44.462605 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zvkb9" event={"ID":"9cab2663-d571-4988-b005-338943d811f5","Type":"ContainerStarted","Data":"ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131"} Sep 29 09:33:44 crc kubenswrapper[4779]: I0929 09:33:44.464170 4779 generic.go:334] "Generic (PLEG): container finished" podID="d65a114a-c403-4e47-9496-eea1f6b50f8a" containerID="36dca546572c50c3553d26fd6cb68e12609c1f38d55cc812c7f930e462a486be" 
exitCode=0 Sep 29 09:33:44 crc kubenswrapper[4779]: I0929 09:33:44.464211 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5rpgs" event={"ID":"d65a114a-c403-4e47-9496-eea1f6b50f8a","Type":"ContainerDied","Data":"36dca546572c50c3553d26fd6cb68e12609c1f38d55cc812c7f930e462a486be"} Sep 29 09:33:44 crc kubenswrapper[4779]: I0929 09:33:44.480034 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zvkb9" podStartSLOduration=2.9140668119999997 podStartE2EDuration="5.480016976s" podCreationTimestamp="2025-09-29 09:33:39 +0000 UTC" firstStartedPulling="2025-09-29 09:33:41.437153648 +0000 UTC m=+253.418477552" lastFinishedPulling="2025-09-29 09:33:44.003103802 +0000 UTC m=+255.984427716" observedRunningTime="2025-09-29 09:33:44.478422787 +0000 UTC m=+256.459746711" watchObservedRunningTime="2025-09-29 09:33:44.480016976 +0000 UTC m=+256.461340880" Sep 29 09:33:45 crc kubenswrapper[4779]: I0929 09:33:45.471302 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5rpgs" event={"ID":"d65a114a-c403-4e47-9496-eea1f6b50f8a","Type":"ContainerStarted","Data":"5eca7d5d4f40d9b5ee039fec23e3bd21cc5d936fb4d24869907b89e7d8e1759a"} Sep 29 09:33:47 crc kubenswrapper[4779]: I0929 09:33:47.698273 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:47 crc kubenswrapper[4779]: I0929 09:33:47.698609 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:47 crc kubenswrapper[4779]: I0929 09:33:47.740202 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:47 crc kubenswrapper[4779]: I0929 09:33:47.758526 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5rpgs" podStartSLOduration=5.306572372 podStartE2EDuration="8.758510399s" podCreationTimestamp="2025-09-29 09:33:39 +0000 UTC" firstStartedPulling="2025-09-29 09:33:41.439927083 +0000 UTC m=+253.421250987" lastFinishedPulling="2025-09-29 09:33:44.89186509 +0000 UTC m=+256.873189014" observedRunningTime="2025-09-29 09:33:45.48931701 +0000 UTC m=+257.470640924" watchObservedRunningTime="2025-09-29 09:33:47.758510399 +0000 UTC m=+259.739834303" Sep 29 09:33:47 crc kubenswrapper[4779]: I0929 09:33:47.940355 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:47 crc kubenswrapper[4779]: I0929 09:33:47.940707 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:47 crc kubenswrapper[4779]: I0929 09:33:47.977202 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:48 crc kubenswrapper[4779]: I0929 09:33:48.535717 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jsh2v" Sep 29 09:33:48 crc kubenswrapper[4779]: I0929 09:33:48.536268 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q5kll" Sep 29 09:33:50 crc kubenswrapper[4779]: I0929 09:33:50.084735 4779 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:50 crc kubenswrapper[4779]: I0929 09:33:50.085018 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:50 crc kubenswrapper[4779]: I0929 09:33:50.132020 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:33:50 crc kubenswrapper[4779]: I0929 09:33:50.288458 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:50 crc kubenswrapper[4779]: I0929 09:33:50.288526 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:50 crc kubenswrapper[4779]: I0929 09:33:50.325164 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:50 crc kubenswrapper[4779]: I0929 09:33:50.529965 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zvkb9" Sep 29 09:33:50 crc kubenswrapper[4779]: I0929 09:33:50.548378 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5rpgs" Sep 29 09:35:46 crc kubenswrapper[4779]: I0929 09:35:46.966673 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:35:46 crc kubenswrapper[4779]: I0929 09:35:46.967404 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.650151 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gxqcp"] Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.651422 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.669388 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gxqcp"] Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.762742 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-registry-tls\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.763085 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5028cc79-6e41-44f3-bbf3-028ee0d0433c-registry-certificates\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.763105 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5028cc79-6e41-44f3-bbf3-028ee0d0433c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.763140 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.763185 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5028cc79-6e41-44f3-bbf3-028ee0d0433c-trusted-ca\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.763323 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-bound-sa-token\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.763416 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5028cc79-6e41-44f3-bbf3-028ee0d0433c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.763460 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6767b\" (UniqueName: 
\"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-kube-api-access-6767b\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.789497 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.863984 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5028cc79-6e41-44f3-bbf3-028ee0d0433c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.864030 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6767b\" (UniqueName: \"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-kube-api-access-6767b\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.864084 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-registry-tls\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.864104 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5028cc79-6e41-44f3-bbf3-028ee0d0433c-registry-certificates\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.864126 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5028cc79-6e41-44f3-bbf3-028ee0d0433c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.864483 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5028cc79-6e41-44f3-bbf3-028ee0d0433c-trusted-ca\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.865117 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-bound-sa-token\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.864486 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5028cc79-6e41-44f3-bbf3-028ee0d0433c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.865226 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5028cc79-6e41-44f3-bbf3-028ee0d0433c-registry-certificates\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.866039 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5028cc79-6e41-44f3-bbf3-028ee0d0433c-trusted-ca\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.869736 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-registry-tls\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.871315 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5028cc79-6e41-44f3-bbf3-028ee0d0433c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.881078 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-bound-sa-token\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.881089 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6767b\" (UniqueName: \"kubernetes.io/projected/5028cc79-6e41-44f3-bbf3-028ee0d0433c-kube-api-access-6767b\") pod \"image-registry-66df7c8f76-gxqcp\" (UID: \"5028cc79-6e41-44f3-bbf3-028ee0d0433c\") " pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:07 crc kubenswrapper[4779]: I0929 09:36:07.967025 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:08 crc kubenswrapper[4779]: I0929 09:36:08.411130 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gxqcp"] Sep 29 09:36:09 crc kubenswrapper[4779]: I0929 09:36:09.336136 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" event={"ID":"5028cc79-6e41-44f3-bbf3-028ee0d0433c","Type":"ContainerStarted","Data":"ba5acd1dcfd597c6c97e8047892a0138511840a998fe5e74907068c6365b9989"} Sep 29 09:36:09 crc kubenswrapper[4779]: I0929 09:36:09.336419 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" event={"ID":"5028cc79-6e41-44f3-bbf3-028ee0d0433c","Type":"ContainerStarted","Data":"d8e5160ec70f180c008adeccc72f36ea70af8cbe448632d5f4a2a77305a13c7d"} Sep 29 09:36:09 crc kubenswrapper[4779]: I0929 09:36:09.336439 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:09 crc kubenswrapper[4779]: I0929 09:36:09.370726 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" podStartSLOduration=2.37069873 podStartE2EDuration="2.37069873s" podCreationTimestamp="2025-09-29 09:36:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:36:09.366037575 +0000 UTC m=+401.347361549" watchObservedRunningTime="2025-09-29 09:36:09.37069873 +0000 UTC m=+401.352022674" Sep 29 09:36:16 crc kubenswrapper[4779]: I0929 09:36:16.966171 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:36:16 crc kubenswrapper[4779]: I0929 09:36:16.966891 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:36:27 crc kubenswrapper[4779]: I0929 09:36:27.978354 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-gxqcp" Sep 29 09:36:28 crc kubenswrapper[4779]: I0929 09:36:28.053821 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rzc4j"] Sep 29 09:36:46 crc kubenswrapper[4779]: I0929 09:36:46.966399 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:36:46 crc kubenswrapper[4779]: I0929 09:36:46.967172 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Sep 29 09:36:46 crc kubenswrapper[4779]: I0929 09:36:46.967254 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:36:46 crc kubenswrapper[4779]: I0929 09:36:46.968320 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fce338249e8d781ebcc8dd4226aa44e91a894c7151c8ca6d0b4f7848ae00e827"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 09:36:46 crc kubenswrapper[4779]: I0929 09:36:46.968447 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://fce338249e8d781ebcc8dd4226aa44e91a894c7151c8ca6d0b4f7848ae00e827" gracePeriod=600 Sep 29 09:36:47 crc kubenswrapper[4779]: I0929 09:36:47.642269 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="fce338249e8d781ebcc8dd4226aa44e91a894c7151c8ca6d0b4f7848ae00e827" exitCode=0 Sep 29 09:36:47 crc kubenswrapper[4779]: I0929 09:36:47.642376 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"fce338249e8d781ebcc8dd4226aa44e91a894c7151c8ca6d0b4f7848ae00e827"} Sep 29 09:36:47 crc kubenswrapper[4779]: I0929 09:36:47.642845 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"7390d6cd5471c2ba2b59c4030a2423a19ab8e39bfa71091617e29773167c149f"} Sep 29 09:36:47 crc kubenswrapper[4779]: I0929 09:36:47.642866 4779 scope.go:117] "RemoveContainer" containerID="c7979507aa9ce4c5e1c0f9a0d07716433ab9cebcc02e8b6ba01251ced0bab99f" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.100375 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" podUID="5f395791-fcf5-4602-903b-06c24127b40e" containerName="registry" containerID="cri-o://d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099" gracePeriod=30 Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.511693 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.570007 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5f395791-fcf5-4602-903b-06c24127b40e-ca-trust-extracted\") pod \"5f395791-fcf5-4602-903b-06c24127b40e\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.570216 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-registry-certificates\") pod \"5f395791-fcf5-4602-903b-06c24127b40e\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.570327 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpk8n\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-kube-api-access-dpk8n\") pod \"5f395791-fcf5-4602-903b-06c24127b40e\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.570420 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-registry-tls\") pod \"5f395791-fcf5-4602-903b-06c24127b40e\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.570502 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-trusted-ca\") pod \"5f395791-fcf5-4602-903b-06c24127b40e\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.570638 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-bound-sa-token\") pod \"5f395791-fcf5-4602-903b-06c24127b40e\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.570739 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5f395791-fcf5-4602-903b-06c24127b40e-installation-pull-secrets\") pod \"5f395791-fcf5-4602-903b-06c24127b40e\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.571018 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"5f395791-fcf5-4602-903b-06c24127b40e\" (UID: \"5f395791-fcf5-4602-903b-06c24127b40e\") " Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.572262 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "5f395791-fcf5-4602-903b-06c24127b40e" (UID: "5f395791-fcf5-4602-903b-06c24127b40e"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.576530 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "5f395791-fcf5-4602-903b-06c24127b40e" (UID: "5f395791-fcf5-4602-903b-06c24127b40e"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.580103 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f395791-fcf5-4602-903b-06c24127b40e-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "5f395791-fcf5-4602-903b-06c24127b40e" (UID: "5f395791-fcf5-4602-903b-06c24127b40e"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.580146 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-kube-api-access-dpk8n" (OuterVolumeSpecName: "kube-api-access-dpk8n") pod "5f395791-fcf5-4602-903b-06c24127b40e" (UID: "5f395791-fcf5-4602-903b-06c24127b40e"). InnerVolumeSpecName "kube-api-access-dpk8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.584545 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "5f395791-fcf5-4602-903b-06c24127b40e" (UID: "5f395791-fcf5-4602-903b-06c24127b40e"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.584714 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "5f395791-fcf5-4602-903b-06c24127b40e" (UID: "5f395791-fcf5-4602-903b-06c24127b40e"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.590954 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "5f395791-fcf5-4602-903b-06c24127b40e" (UID: "5f395791-fcf5-4602-903b-06c24127b40e"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.594518 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f395791-fcf5-4602-903b-06c24127b40e-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "5f395791-fcf5-4602-903b-06c24127b40e" (UID: "5f395791-fcf5-4602-903b-06c24127b40e"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.672660 4779 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5f395791-fcf5-4602-903b-06c24127b40e-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.672699 4779 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.672710 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpk8n\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-kube-api-access-dpk8n\") on node \"crc\" DevicePath \"\"" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.672720 4779 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.672729 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5f395791-fcf5-4602-903b-06c24127b40e-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.672737 4779 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5f395791-fcf5-4602-903b-06c24127b40e-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.672746 4779 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5f395791-fcf5-4602-903b-06c24127b40e-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.685414 4779 generic.go:334] "Generic (PLEG): container finished" podID="5f395791-fcf5-4602-903b-06c24127b40e" containerID="d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099" exitCode=0 Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.685489 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" event={"ID":"5f395791-fcf5-4602-903b-06c24127b40e","Type":"ContainerDied","Data":"d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099"} Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.685523 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" event={"ID":"5f395791-fcf5-4602-903b-06c24127b40e","Type":"ContainerDied","Data":"ce92b6e15a8ce39fe772d0e51692506dd60c935b49c63056ae3d3c0a904a433d"} Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.685575 4779 scope.go:117] "RemoveContainer" containerID="d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.685633 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rzc4j" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.703608 4779 scope.go:117] "RemoveContainer" containerID="d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099" Sep 29 09:36:53 crc kubenswrapper[4779]: E0929 09:36:53.704042 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099\": container with ID starting with d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099 not found: ID does not exist" containerID="d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.704072 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099"} err="failed to get container status \"d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099\": rpc error: code = NotFound desc = could not find container \"d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099\": container with ID starting with d180904e8d065dee7e1f774f298b4fd383e2086e2b562127dd902b0c37359099 not found: ID does not exist" Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.714181 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rzc4j"] Sep 29 09:36:53 crc kubenswrapper[4779]: I0929 09:36:53.717382 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rzc4j"] Sep 29 09:36:54 crc kubenswrapper[4779]: I0929 09:36:54.728053 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f395791-fcf5-4602-903b-06c24127b40e" path="/var/lib/kubelet/pods/5f395791-fcf5-4602-903b-06c24127b40e/volumes" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.252630 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mqmnc"] Sep 29 09:39:02 crc kubenswrapper[4779]: E0929 09:39:02.253367 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f395791-fcf5-4602-903b-06c24127b40e" containerName="registry" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.253380 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f395791-fcf5-4602-903b-06c24127b40e" containerName="registry" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.253467 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f395791-fcf5-4602-903b-06c24127b40e" containerName="registry" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.253827 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-mqmnc" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.256148 4779 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-j5p78" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.256254 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.256254 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.256736 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cj4ns"] Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.257439 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-cj4ns" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.258796 4779 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-zv84b" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.270196 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mqmnc"] Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.273316 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-znrc5"] Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.274162 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.276493 4779 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-2xqkk" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.280074 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cj4ns"] Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.291937 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-znrc5"] Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.382015 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rgz5\" (UniqueName: \"kubernetes.io/projected/8941c68d-b798-425d-add1-c47fb552d2ba-kube-api-access-6rgz5\") pod \"cert-manager-webhook-5655c58dd6-znrc5\" (UID: \"8941c68d-b798-425d-add1-c47fb552d2ba\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.382081 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d4vk\" (UniqueName: \"kubernetes.io/projected/547d471c-18d8-4abb-bbfb-99eb0d042eae-kube-api-access-9d4vk\") pod \"cert-manager-5b446d88c5-cj4ns\" (UID: \"547d471c-18d8-4abb-bbfb-99eb0d042eae\") " pod="cert-manager/cert-manager-5b446d88c5-cj4ns" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.382128 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5599z\" (UniqueName: \"kubernetes.io/projected/4087c879-377a-4732-b7a5-67cd5e9552ed-kube-api-access-5599z\") pod \"cert-manager-cainjector-7f985d654d-mqmnc\" (UID: \"4087c879-377a-4732-b7a5-67cd5e9552ed\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mqmnc" Sep 29 09:39:02 
crc kubenswrapper[4779]: I0929 09:39:02.482885 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rgz5\" (UniqueName: \"kubernetes.io/projected/8941c68d-b798-425d-add1-c47fb552d2ba-kube-api-access-6rgz5\") pod \"cert-manager-webhook-5655c58dd6-znrc5\" (UID: \"8941c68d-b798-425d-add1-c47fb552d2ba\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.482964 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d4vk\" (UniqueName: \"kubernetes.io/projected/547d471c-18d8-4abb-bbfb-99eb0d042eae-kube-api-access-9d4vk\") pod \"cert-manager-5b446d88c5-cj4ns\" (UID: \"547d471c-18d8-4abb-bbfb-99eb0d042eae\") " pod="cert-manager/cert-manager-5b446d88c5-cj4ns" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.483003 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5599z\" (UniqueName: \"kubernetes.io/projected/4087c879-377a-4732-b7a5-67cd5e9552ed-kube-api-access-5599z\") pod \"cert-manager-cainjector-7f985d654d-mqmnc\" (UID: \"4087c879-377a-4732-b7a5-67cd5e9552ed\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mqmnc" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.499261 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5599z\" (UniqueName: \"kubernetes.io/projected/4087c879-377a-4732-b7a5-67cd5e9552ed-kube-api-access-5599z\") pod \"cert-manager-cainjector-7f985d654d-mqmnc\" (UID: \"4087c879-377a-4732-b7a5-67cd5e9552ed\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mqmnc" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.499319 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rgz5\" (UniqueName: \"kubernetes.io/projected/8941c68d-b798-425d-add1-c47fb552d2ba-kube-api-access-6rgz5\") pod \"cert-manager-webhook-5655c58dd6-znrc5\" (UID: \"8941c68d-b798-425d-add1-c47fb552d2ba\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.508702 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d4vk\" (UniqueName: \"kubernetes.io/projected/547d471c-18d8-4abb-bbfb-99eb0d042eae-kube-api-access-9d4vk\") pod \"cert-manager-5b446d88c5-cj4ns\" (UID: \"547d471c-18d8-4abb-bbfb-99eb0d042eae\") " pod="cert-manager/cert-manager-5b446d88c5-cj4ns" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.577130 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-mqmnc" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.601446 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-cj4ns" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.610464 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.823427 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.825206 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cj4ns"] Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.862752 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-znrc5"] Sep 29 09:39:02 crc kubenswrapper[4779]: W0929 09:39:02.866550 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8941c68d_b798_425d_add1_c47fb552d2ba.slice/crio-25b73650fb18eb5c33df3a0ebe966233fdff5ea39c4e3cb2a50de5f65eb3e8dd WatchSource:0}: Error finding container 25b73650fb18eb5c33df3a0ebe966233fdff5ea39c4e3cb2a50de5f65eb3e8dd: Status 404 returned error can't find the container with id 25b73650fb18eb5c33df3a0ebe966233fdff5ea39c4e3cb2a50de5f65eb3e8dd Sep 29 09:39:02 crc kubenswrapper[4779]: I0929 09:39:02.963277 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mqmnc"] Sep 29 09:39:02 crc kubenswrapper[4779]: W0929 09:39:02.969220 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4087c879_377a_4732_b7a5_67cd5e9552ed.slice/crio-d7cc854df3d2cc104081479d11bf1ba99ab5500f885ce4fb9ed34633ceeb5cb6 WatchSource:0}: Error finding container d7cc854df3d2cc104081479d11bf1ba99ab5500f885ce4fb9ed34633ceeb5cb6: Status 404 returned error can't find the container with id d7cc854df3d2cc104081479d11bf1ba99ab5500f885ce4fb9ed34633ceeb5cb6 Sep 29 09:39:03 crc kubenswrapper[4779]: I0929 09:39:03.483072 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-mqmnc" event={"ID":"4087c879-377a-4732-b7a5-67cd5e9552ed","Type":"ContainerStarted","Data":"d7cc854df3d2cc104081479d11bf1ba99ab5500f885ce4fb9ed34633ceeb5cb6"} Sep 29 09:39:03 crc kubenswrapper[4779]: I0929 09:39:03.484279 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-cj4ns" event={"ID":"547d471c-18d8-4abb-bbfb-99eb0d042eae","Type":"ContainerStarted","Data":"bd7faf9f9e97ab97f1e9b673c5f2a1a0c35bd020e63d713515d9b33b47f0094c"} Sep 29 09:39:03 crc kubenswrapper[4779]: I0929 09:39:03.485400 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" event={"ID":"8941c68d-b798-425d-add1-c47fb552d2ba","Type":"ContainerStarted","Data":"25b73650fb18eb5c33df3a0ebe966233fdff5ea39c4e3cb2a50de5f65eb3e8dd"} Sep 29 09:39:06 crc kubenswrapper[4779]: I0929 09:39:06.517060 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-cj4ns" event={"ID":"547d471c-18d8-4abb-bbfb-99eb0d042eae","Type":"ContainerStarted","Data":"58e9c2ad848c4cec6217dc14c7c8c21dfb87291a20c32f5f57826b20dfd9cec3"} Sep 29 09:39:06 crc kubenswrapper[4779]: I0929 09:39:06.520513 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" event={"ID":"8941c68d-b798-425d-add1-c47fb552d2ba","Type":"ContainerStarted","Data":"5ca10c5bbd6e8c597aa9f33850ee4f1c129b13f0c87f7e30ec401f1d75ec645c"} Sep 29 09:39:06 crc kubenswrapper[4779]: I0929 09:39:06.520661 4779 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" Sep 29 09:39:06 crc kubenswrapper[4779]: I0929 09:39:06.537117 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-cj4ns" podStartSLOduration=1.794573952 podStartE2EDuration="4.537086407s" podCreationTimestamp="2025-09-29 09:39:02 +0000 UTC" firstStartedPulling="2025-09-29 09:39:02.82310638 +0000 UTC m=+574.804430284" lastFinishedPulling="2025-09-29 09:39:05.565618825 +0000 UTC m=+577.546942739" observedRunningTime="2025-09-29 09:39:06.53057215 +0000 UTC m=+578.511896104" watchObservedRunningTime="2025-09-29 09:39:06.537086407 +0000 UTC m=+578.518410331" Sep 29 09:39:06 crc kubenswrapper[4779]: I0929 09:39:06.554987 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" podStartSLOduration=1.9209970379999999 podStartE2EDuration="4.554953111s" podCreationTimestamp="2025-09-29 09:39:02 +0000 UTC" firstStartedPulling="2025-09-29 09:39:02.86830034 +0000 UTC m=+574.849624244" lastFinishedPulling="2025-09-29 09:39:05.502256413 +0000 UTC m=+577.483580317" observedRunningTime="2025-09-29 09:39:06.547635681 +0000 UTC m=+578.528959605" watchObservedRunningTime="2025-09-29 09:39:06.554953111 +0000 UTC m=+578.536277055" Sep 29 09:39:07 crc kubenswrapper[4779]: I0929 09:39:07.527988 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-mqmnc" event={"ID":"4087c879-377a-4732-b7a5-67cd5e9552ed","Type":"ContainerStarted","Data":"35f391b8c2dec30d167db4084788ed22f98573755bbc5dc5158b5afa920bf8a0"} Sep 29 09:39:07 crc kubenswrapper[4779]: I0929 09:39:07.542823 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-mqmnc" podStartSLOduration=2.002392979 podStartE2EDuration="5.542805255s" podCreationTimestamp="2025-09-29 09:39:02 +0000 UTC" firstStartedPulling="2025-09-29 09:39:02.971446316 +0000 UTC m=+574.952770220" lastFinishedPulling="2025-09-29 09:39:06.511858562 +0000 UTC m=+578.493182496" observedRunningTime="2025-09-29 09:39:07.542276909 +0000 UTC m=+579.523600813" watchObservedRunningTime="2025-09-29 09:39:07.542805255 +0000 UTC m=+579.524129159" Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.613703 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-znrc5" Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.883110 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ncxc4"] Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.883606 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovn-controller" containerID="cri-o://6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3" gracePeriod=30 Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.883677 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="sbdb" containerID="cri-o://9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795" gracePeriod=30 Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.883736 4779 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="nbdb" containerID="cri-o://7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4" gracePeriod=30 Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.883747 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd" gracePeriod=30 Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.883675 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="northd" containerID="cri-o://fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c" gracePeriod=30 Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.883806 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kube-rbac-proxy-node" containerID="cri-o://a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070" gracePeriod=30 Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.883832 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovn-acl-logging" containerID="cri-o://a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4" gracePeriod=30 Sep 29 09:39:12 crc kubenswrapper[4779]: I0929 09:39:12.970307 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" containerID="cri-o://ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" gracePeriod=30 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.217311 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/3.log" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.219482 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovn-acl-logging/0.log" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.219880 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovn-controller/0.log" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.220337 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264015 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zw4c8"] Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264213 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264226 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264236 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="sbdb" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264242 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="sbdb" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264249 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="nbdb" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264254 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="nbdb" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264263 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264269 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264277 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264283 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264291 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="northd" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264297 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="northd" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264304 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264309 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264317 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovn-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264323 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovn-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264331 4779 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kube-rbac-proxy-node" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264336 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kube-rbac-proxy-node" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264343 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kubecfg-setup" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264349 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kubecfg-setup" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264356 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264363 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264372 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264378 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.264385 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovn-acl-logging" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264417 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovn-acl-logging" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264518 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264528 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="northd" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264534 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="nbdb" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264543 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="kube-rbac-proxy-node" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264552 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264559 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovn-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264566 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovn-acl-logging" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264574 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 
09:39:13.264580 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264587 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="sbdb" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264594 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.264779 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerName="ovnkube-controller" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.266298 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333026 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-ovn-kubernetes\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333080 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-openvswitch\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333104 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-node-log\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333135 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-env-overrides\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333151 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333173 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-systemd\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333174 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). 
InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333189 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-node-log" (OuterVolumeSpecName: "node-log") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333199 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m99nd\" (UniqueName: \"kubernetes.io/projected/60d71749-dfb5-4095-b11b-b70f1a549b88-kube-api-access-m99nd\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333240 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/60d71749-dfb5-4095-b11b-b70f1a549b88-ovn-node-metrics-cert\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333268 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-log-socket\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333308 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-script-lib\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333352 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-var-lib-openvswitch\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333372 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-ovn\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333390 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-slash\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333413 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-netns\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333432 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-bin\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333431 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333464 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-config\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333477 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-slash" (OuterVolumeSpecName: "host-slash") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333507 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333506 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333566 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333608 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-netd\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333634 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-var-lib-cni-networks-ovn-kubernetes\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333719 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-kubelet\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333739 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-systemd-units\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333761 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-etc-openvswitch\") pod \"60d71749-dfb5-4095-b11b-b70f1a549b88\" (UID: \"60d71749-dfb5-4095-b11b-b70f1a549b88\") " Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333669 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333675 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333819 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333885 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333869 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.333951 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334022 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-systemd-units\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334050 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovnkube-script-lib\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334143 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334155 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334164 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-env-overrides\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334193 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-ovn\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334228 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovnkube-config\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334000 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-log-socket" (OuterVolumeSpecName: "log-socket") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334281 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-cni-bin\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334322 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-log-socket\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-kubelet\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334402 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334427 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-cni-netd\") pod 
\"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334460 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-systemd\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334486 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-etc-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334510 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bhfd\" (UniqueName: \"kubernetes.io/projected/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-kube-api-access-5bhfd\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334549 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-run-ovn-kubernetes\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334582 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334608 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-run-netns\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334660 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-var-lib-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334710 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-node-log\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334726 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovn-node-metrics-cert\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334742 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-slash\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334881 4779 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-log-socket\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334938 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334954 4779 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334965 4779 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334976 4779 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-slash\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.334987 4779 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335019 4779 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335027 4779 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335036 4779 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335044 4779 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335051 4779 reconciler_common.go:293] "Volume detached for volume 
\"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335060 4779 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335068 4779 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335097 4779 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335105 4779 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335112 4779 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-node-log\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.335120 4779 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/60d71749-dfb5-4095-b11b-b70f1a549b88-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.338277 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60d71749-dfb5-4095-b11b-b70f1a549b88-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.339155 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60d71749-dfb5-4095-b11b-b70f1a549b88-kube-api-access-m99nd" (OuterVolumeSpecName: "kube-api-access-m99nd") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "kube-api-access-m99nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.345283 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "60d71749-dfb5-4095-b11b-b70f1a549b88" (UID: "60d71749-dfb5-4095-b11b-b70f1a549b88"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436368 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436450 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-run-netns\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436483 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-var-lib-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436505 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-node-log\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436516 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436574 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-run-netns\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436599 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-node-log\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436603 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-var-lib-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436526 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovn-node-metrics-cert\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436658 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-slash\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436683 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-systemd-units\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436700 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovnkube-script-lib\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436727 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-env-overrides\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436741 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-ovn\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436757 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovnkube-config\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436771 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-cni-bin\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436795 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-log-socket\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436811 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-kubelet\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436828 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436844 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-cni-netd\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436858 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-systemd\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436872 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-etc-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436888 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bhfd\" (UniqueName: \"kubernetes.io/projected/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-kube-api-access-5bhfd\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436916 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-run-ovn-kubernetes\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.436974 4779 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/60d71749-dfb5-4095-b11b-b70f1a549b88-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437000 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-cni-bin\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437029 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-run-ovn-kubernetes\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437057 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-slash\") pod 
\"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437065 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-systemd-units\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437088 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-log-socket\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437146 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-kubelet\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437181 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437211 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-host-cni-netd\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437239 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-systemd\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437283 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-etc-openvswitch\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437005 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m99nd\" (UniqueName: \"kubernetes.io/projected/60d71749-dfb5-4095-b11b-b70f1a549b88-kube-api-access-m99nd\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437551 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/60d71749-dfb5-4095-b11b-b70f1a549b88-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437586 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-run-ovn\") pod 
\"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.437746 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovnkube-script-lib\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.438090 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-env-overrides\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.438327 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovnkube-config\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.442987 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-ovn-node-metrics-cert\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.452619 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bhfd\" (UniqueName: \"kubernetes.io/projected/e9334f2e-1314-4e4a-9bc1-f1c5a241584e-kube-api-access-5bhfd\") pod \"ovnkube-node-zw4c8\" (UID: \"e9334f2e-1314-4e4a-9bc1-f1c5a241584e\") " pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.561711 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovnkube-controller/3.log" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.564847 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovn-acl-logging/0.log" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.565944 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-ncxc4_60d71749-dfb5-4095-b11b-b70f1a549b88/ovn-controller/0.log" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566541 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" exitCode=0 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566580 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795" exitCode=0 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566596 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4" exitCode=0 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 
09:39:13.566614 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c" exitCode=0 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566631 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd" exitCode=0 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566644 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070" exitCode=0 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566659 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4" exitCode=143 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566675 4779 generic.go:334] "Generic (PLEG): container finished" podID="60d71749-dfb5-4095-b11b-b70f1a549b88" containerID="6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3" exitCode=143 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566659 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566678 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566776 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566804 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566824 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566843 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.566821 4779 scope.go:117] "RemoveContainer" containerID="ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567243 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} Sep 29 09:39:13 crc kubenswrapper[4779]: 
I0929 09:39:13.567313 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567333 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567376 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567391 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567402 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567412 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567462 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567473 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567483 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567563 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567583 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567596 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567607 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567618 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} Sep 29 09:39:13 crc kubenswrapper[4779]: 
I0929 09:39:13.567629 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567640 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567747 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567762 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567773 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567783 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567804 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567822 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567835 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567846 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567856 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567868 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567881 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567891 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} Sep 29 09:39:13 crc kubenswrapper[4779]: 
I0929 09:39:13.567927 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567938 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567949 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.567993 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ncxc4" event={"ID":"60d71749-dfb5-4095-b11b-b70f1a549b88","Type":"ContainerDied","Data":"cd89eb93623faf9e6fe6ae575af0eabde3aaed1a89def7af1a48eb45c9854932"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568045 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568059 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568071 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568082 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568095 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568508 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568527 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568539 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568549 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.568560 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} Sep 29 09:39:13 crc kubenswrapper[4779]: 
I0929 09:39:13.569532 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/2.log" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.570032 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/1.log" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.570076 4779 generic.go:334] "Generic (PLEG): container finished" podID="6b0e23f7-a478-48e2-a745-193a90e87553" containerID="4bb23b9d833a44d610758b9d19cba1ff80274c767464a459a50ef01121718270" exitCode=2 Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.570104 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2tkr" event={"ID":"6b0e23f7-a478-48e2-a745-193a90e87553","Type":"ContainerDied","Data":"4bb23b9d833a44d610758b9d19cba1ff80274c767464a459a50ef01121718270"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.570126 4779 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115"} Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.570567 4779 scope.go:117] "RemoveContainer" containerID="4bb23b9d833a44d610758b9d19cba1ff80274c767464a459a50ef01121718270" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.570728 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-f2tkr_openshift-multus(6b0e23f7-a478-48e2-a745-193a90e87553)\"" pod="openshift-multus/multus-f2tkr" podUID="6b0e23f7-a478-48e2-a745-193a90e87553" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.577643 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.589635 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:39:13 crc kubenswrapper[4779]: W0929 09:39:13.608678 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9334f2e_1314_4e4a_9bc1_f1c5a241584e.slice/crio-ec572eb9e6e562524322ec858d0bc9cd6cb4f3dc1227e87e5ed95bd89bc7b73d WatchSource:0}: Error finding container ec572eb9e6e562524322ec858d0bc9cd6cb4f3dc1227e87e5ed95bd89bc7b73d: Status 404 returned error can't find the container with id ec572eb9e6e562524322ec858d0bc9cd6cb4f3dc1227e87e5ed95bd89bc7b73d Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.622964 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ncxc4"] Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.624264 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ncxc4"] Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.634205 4779 scope.go:117] "RemoveContainer" containerID="9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.656316 4779 scope.go:117] "RemoveContainer" containerID="7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.672641 4779 scope.go:117] "RemoveContainer" containerID="fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.695496 4779 scope.go:117] "RemoveContainer" containerID="d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.712482 4779 scope.go:117] "RemoveContainer" containerID="a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.741557 4779 scope.go:117] "RemoveContainer" containerID="a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.799425 4779 scope.go:117] "RemoveContainer" containerID="6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.819669 4779 scope.go:117] "RemoveContainer" containerID="4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.835636 4779 scope.go:117] "RemoveContainer" containerID="ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.836126 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": container with ID starting with ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806 not found: ID does not exist" containerID="ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.836163 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} err="failed to get container status \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": rpc error: 
code = NotFound desc = could not find container \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": container with ID starting with ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.836187 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.836535 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": container with ID starting with 1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5 not found: ID does not exist" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.836611 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} err="failed to get container status \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": rpc error: code = NotFound desc = could not find container \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": container with ID starting with 1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.836667 4779 scope.go:117] "RemoveContainer" containerID="9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.837165 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": container with ID starting with 9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795 not found: ID does not exist" containerID="9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.837211 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} err="failed to get container status \"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": rpc error: code = NotFound desc = could not find container \"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": container with ID starting with 9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.837269 4779 scope.go:117] "RemoveContainer" containerID="7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.837609 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": container with ID starting with 7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4 not found: ID does not exist" containerID="7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.837656 4779 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} err="failed to get container status \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": rpc error: code = NotFound desc = could not find container \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": container with ID starting with 7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.837689 4779 scope.go:117] "RemoveContainer" containerID="fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.838333 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": container with ID starting with fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c not found: ID does not exist" containerID="fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.838364 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} err="failed to get container status \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": rpc error: code = NotFound desc = could not find container \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": container with ID starting with fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.838659 4779 scope.go:117] "RemoveContainer" containerID="d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.839055 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": container with ID starting with d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd not found: ID does not exist" containerID="d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.839112 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} err="failed to get container status \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": rpc error: code = NotFound desc = could not find container \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": container with ID starting with d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.839156 4779 scope.go:117] "RemoveContainer" containerID="a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.839504 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": container with ID starting with a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070 not found: ID does not exist" 
containerID="a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.839538 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} err="failed to get container status \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": rpc error: code = NotFound desc = could not find container \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": container with ID starting with a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.839556 4779 scope.go:117] "RemoveContainer" containerID="a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.840187 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": container with ID starting with a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4 not found: ID does not exist" containerID="a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.840237 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} err="failed to get container status \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": rpc error: code = NotFound desc = could not find container \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": container with ID starting with a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.840275 4779 scope.go:117] "RemoveContainer" containerID="6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.840764 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": container with ID starting with 6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3 not found: ID does not exist" containerID="6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.840813 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} err="failed to get container status \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": rpc error: code = NotFound desc = could not find container \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": container with ID starting with 6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.840841 4779 scope.go:117] "RemoveContainer" containerID="4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2" Sep 29 09:39:13 crc kubenswrapper[4779]: E0929 09:39:13.841179 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": container with ID starting with 4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2 not found: ID does not exist" containerID="4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.841208 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} err="failed to get container status \"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": rpc error: code = NotFound desc = could not find container \"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": container with ID starting with 4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.841226 4779 scope.go:117] "RemoveContainer" containerID="ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.841666 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} err="failed to get container status \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": rpc error: code = NotFound desc = could not find container \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": container with ID starting with ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.841729 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.843292 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} err="failed to get container status \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": rpc error: code = NotFound desc = could not find container \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": container with ID starting with 1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.843338 4779 scope.go:117] "RemoveContainer" containerID="9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.843724 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} err="failed to get container status \"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": rpc error: code = NotFound desc = could not find container \"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": container with ID starting with 9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.843751 4779 scope.go:117] "RemoveContainer" containerID="7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.844128 4779 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} err="failed to get container status \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": rpc error: code = NotFound desc = could not find container \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": container with ID starting with 7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.844178 4779 scope.go:117] "RemoveContainer" containerID="fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.844681 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} err="failed to get container status \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": rpc error: code = NotFound desc = could not find container \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": container with ID starting with fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.844735 4779 scope.go:117] "RemoveContainer" containerID="d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.845138 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} err="failed to get container status \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": rpc error: code = NotFound desc = could not find container \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": container with ID starting with d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.845182 4779 scope.go:117] "RemoveContainer" containerID="a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.845465 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} err="failed to get container status \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": rpc error: code = NotFound desc = could not find container \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": container with ID starting with a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.845497 4779 scope.go:117] "RemoveContainer" containerID="a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.845955 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} err="failed to get container status \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": rpc error: code = NotFound desc = could not find container \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": container with ID starting with a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4 not found: ID does not exist" Sep 
29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.845981 4779 scope.go:117] "RemoveContainer" containerID="6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.846317 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} err="failed to get container status \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": rpc error: code = NotFound desc = could not find container \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": container with ID starting with 6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.846371 4779 scope.go:117] "RemoveContainer" containerID="4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.846634 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} err="failed to get container status \"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": rpc error: code = NotFound desc = could not find container \"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": container with ID starting with 4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.846660 4779 scope.go:117] "RemoveContainer" containerID="ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.846988 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} err="failed to get container status \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": rpc error: code = NotFound desc = could not find container \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": container with ID starting with ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.847017 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.847274 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} err="failed to get container status \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": rpc error: code = NotFound desc = could not find container \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": container with ID starting with 1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.847301 4779 scope.go:117] "RemoveContainer" containerID="9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.847813 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} err="failed to get container status 
\"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": rpc error: code = NotFound desc = could not find container \"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": container with ID starting with 9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.847855 4779 scope.go:117] "RemoveContainer" containerID="7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.848201 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} err="failed to get container status \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": rpc error: code = NotFound desc = could not find container \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": container with ID starting with 7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.848227 4779 scope.go:117] "RemoveContainer" containerID="fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.848679 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} err="failed to get container status \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": rpc error: code = NotFound desc = could not find container \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": container with ID starting with fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.848724 4779 scope.go:117] "RemoveContainer" containerID="d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.849122 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} err="failed to get container status \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": rpc error: code = NotFound desc = could not find container \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": container with ID starting with d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.849164 4779 scope.go:117] "RemoveContainer" containerID="a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.849598 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} err="failed to get container status \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": rpc error: code = NotFound desc = could not find container \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": container with ID starting with a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.849630 4779 scope.go:117] "RemoveContainer" 
containerID="a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.850025 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} err="failed to get container status \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": rpc error: code = NotFound desc = could not find container \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": container with ID starting with a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.850068 4779 scope.go:117] "RemoveContainer" containerID="6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.850429 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} err="failed to get container status \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": rpc error: code = NotFound desc = could not find container \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": container with ID starting with 6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.850459 4779 scope.go:117] "RemoveContainer" containerID="4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.850871 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} err="failed to get container status \"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": rpc error: code = NotFound desc = could not find container \"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": container with ID starting with 4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.850945 4779 scope.go:117] "RemoveContainer" containerID="ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.851507 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} err="failed to get container status \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": rpc error: code = NotFound desc = could not find container \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": container with ID starting with ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.851540 4779 scope.go:117] "RemoveContainer" containerID="1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.851956 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5"} err="failed to get container status \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": rpc error: code = NotFound desc = could not find 
container \"1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5\": container with ID starting with 1d18b61ad4ffcd8bc213caf632174b18bbb9e1f17b46ba199720c9f155c292f5 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.851996 4779 scope.go:117] "RemoveContainer" containerID="9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.852441 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795"} err="failed to get container status \"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": rpc error: code = NotFound desc = could not find container \"9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795\": container with ID starting with 9e140d46db39380a5737d78e1aa00ad1a5a9cc217de910ca4212beae87764795 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.852483 4779 scope.go:117] "RemoveContainer" containerID="7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.853007 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4"} err="failed to get container status \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": rpc error: code = NotFound desc = could not find container \"7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4\": container with ID starting with 7851ecb6e73d56037db56d7d12fec0a095403080daa7bbf0d4cd79d9a4fd1bd4 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.853044 4779 scope.go:117] "RemoveContainer" containerID="fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.853626 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c"} err="failed to get container status \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": rpc error: code = NotFound desc = could not find container \"fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c\": container with ID starting with fa8dee8aadd625828a22e22668d8f7295ddc3fe24976be6c8610da1157e28d7c not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.853667 4779 scope.go:117] "RemoveContainer" containerID="d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.853995 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd"} err="failed to get container status \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": rpc error: code = NotFound desc = could not find container \"d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd\": container with ID starting with d79814aeee8cb50cbe9a3153ed1a99a82921ac740553c82f6aafd1d7f628cbdd not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.854033 4779 scope.go:117] "RemoveContainer" containerID="a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.854577 4779 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070"} err="failed to get container status \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": rpc error: code = NotFound desc = could not find container \"a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070\": container with ID starting with a334fd50a045f97d97f67b45abf88dc6aaf3d7c2121becc6a9744c86c977b070 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.854610 4779 scope.go:117] "RemoveContainer" containerID="a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.855051 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4"} err="failed to get container status \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": rpc error: code = NotFound desc = could not find container \"a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4\": container with ID starting with a11183ba612a838b92d401b13a2e0d7d1c19de533ecef56b4305e5a9f0784bb4 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.855082 4779 scope.go:117] "RemoveContainer" containerID="6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.855382 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3"} err="failed to get container status \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": rpc error: code = NotFound desc = could not find container \"6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3\": container with ID starting with 6d6677148b1efa21f5eee1dcbccef6d6ea0492513bfa621b1a862488653bc8f3 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.855427 4779 scope.go:117] "RemoveContainer" containerID="4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.855776 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2"} err="failed to get container status \"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": rpc error: code = NotFound desc = could not find container \"4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2\": container with ID starting with 4c2d7b44dba5cc23de1b4ec459d560a2bdff0182a04f6f24ffcf84552bd67db2 not found: ID does not exist" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.855816 4779 scope.go:117] "RemoveContainer" containerID="ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806" Sep 29 09:39:13 crc kubenswrapper[4779]: I0929 09:39:13.856330 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806"} err="failed to get container status \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": rpc error: code = NotFound desc = could not find container \"ae270c4b8f37d6667fbfe00567b00a3aedbc780c87b2638b23a3a8dfc3f36806\": container with ID starting with 
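The NotFound loop above is the kubelet retrying RemoveContainer for containers CRI-O has already garbage-collected: the CRI ContainerStatus RPC comes back with gRPC code NotFound, which callers conventionally treat as "already deleted" rather than a real failure. A minimal sketch of that classification, using only the standard google.golang.org/grpc status API (the error text mirrors the log lines; the check is the usual Go idiom, not kubelet's exact code):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func main() {
	// Simulate the runtime's reply for a container that no longer exists,
	// mirroring the "rpc error: code = NotFound" entries above.
	err := status.Errorf(codes.NotFound,
		"could not find container %q: ID does not exist", "7851ecb6e73d")

	// NotFound on a delete/status call means the container is already gone,
	// so the caller logs it and moves on instead of surfacing an error.
	if status.Code(err) == codes.NotFound {
		fmt.Println("container already removed; treating delete as a no-op")
	}
}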
Sep 29 09:39:14 crc kubenswrapper[4779]: I0929 09:39:14.580565 4779 generic.go:334] "Generic (PLEG): container finished" podID="e9334f2e-1314-4e4a-9bc1-f1c5a241584e" containerID="d174088d4521225e8b7e49e73a3b1202c741a22310855ad8d50f7087c9674638" exitCode=0
Sep 29 09:39:14 crc kubenswrapper[4779]: I0929 09:39:14.580654 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerDied","Data":"d174088d4521225e8b7e49e73a3b1202c741a22310855ad8d50f7087c9674638"}
Sep 29 09:39:14 crc kubenswrapper[4779]: I0929 09:39:14.580698 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"ec572eb9e6e562524322ec858d0bc9cd6cb4f3dc1227e87e5ed95bd89bc7b73d"}
Sep 29 09:39:14 crc kubenswrapper[4779]: I0929 09:39:14.721725 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60d71749-dfb5-4095-b11b-b70f1a549b88" path="/var/lib/kubelet/pods/60d71749-dfb5-4095-b11b-b70f1a549b88/volumes"
Sep 29 09:39:15 crc kubenswrapper[4779]: I0929 09:39:15.594147 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"4dd417f75b77990cf21d97b0cb572cbe32e8314886aacf7a8006191f13c5da11"}
Sep 29 09:39:15 crc kubenswrapper[4779]: I0929 09:39:15.594226 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"38c3550bbcb526f460b8cff6d86eeebd7534f07c05331cb2f131f76009add1d8"}
Sep 29 09:39:15 crc kubenswrapper[4779]: I0929 09:39:15.594247 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"3eed85fcb4f9cf256a4eecb4df4126391ff6d9a45953c1a866c5b3f7c6c36a46"}
Sep 29 09:39:15 crc kubenswrapper[4779]: I0929 09:39:15.594265 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"bcefec8542d193017ccb7961bcc6a55e769f6c578e9e214b5649d319f7b388d9"}
Sep 29 09:39:15 crc kubenswrapper[4779]: I0929 09:39:15.594282 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"13e4a2ce37577deebea02d039415ea268805a056bd1727c85b8248660c3745ac"}
Sep 29 09:39:15 crc kubenswrapper[4779]: I0929 09:39:15.594299 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"a5f2fe085691de496e54b35f38b5eec5ed29f2ff7ff6fbb5d2df486c36edd680"}
Sep 29 09:39:16 crc kubenswrapper[4779]: I0929 09:39:16.966253 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:39:16 crc kubenswrapper[4779]: I0929 09:39:16.966693 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
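The Liveness failures above are the kubelet's HTTP prober getting connection refused on the machine-config-daemon's health port while that container is down. A rough sketch of what an HTTP GET probe does (the endpoint is taken from the log; the "2xx/3xx counts as success" rule matches documented kubelet behavior, and the timeout here is an assumed placeholder, not this cluster's setting):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe issues a GET against a health endpoint and reports failure on either
// a transport error (e.g. "connect: connection refused") or a status outside
// the 200-399 range, mirroring the kubelet's HTTP probe semantics.
func probe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // this is the "dial tcp ... connection refused" case above
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}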
Sep 29 09:39:17 crc kubenswrapper[4779]: I0929 09:39:17.608327 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"2dd166106e5c397ef6d4c012e34a9f441decb53255aee95e1a64738434fd80b8"}
Sep 29 09:39:20 crc kubenswrapper[4779]: I0929 09:39:20.633351 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" event={"ID":"e9334f2e-1314-4e4a-9bc1-f1c5a241584e","Type":"ContainerStarted","Data":"9d1bd4a493a91157edc847185ecd5babfa762bd516036f91f922fb1d6489ba7b"}
Sep 29 09:39:20 crc kubenswrapper[4779]: I0929 09:39:20.633843 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8"
Sep 29 09:39:20 crc kubenswrapper[4779]: I0929 09:39:20.633854 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8"
Sep 29 09:39:20 crc kubenswrapper[4779]: I0929 09:39:20.633864 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8"
Sep 29 09:39:20 crc kubenswrapper[4779]: I0929 09:39:20.661301 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" podStartSLOduration=7.66128233 podStartE2EDuration="7.66128233s" podCreationTimestamp="2025-09-29 09:39:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:39:20.66028403 +0000 UTC m=+592.641608014" watchObservedRunningTime="2025-09-29 09:39:20.66128233 +0000 UTC m=+592.642606234"
Sep 29 09:39:20 crc kubenswrapper[4779]: I0929 09:39:20.667262 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8"
Sep 29 09:39:20 crc kubenswrapper[4779]: I0929 09:39:20.672164 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8"
Sep 29 09:39:25 crc kubenswrapper[4779]: I0929 09:39:25.714445 4779 scope.go:117] "RemoveContainer" containerID="4bb23b9d833a44d610758b9d19cba1ff80274c767464a459a50ef01121718270"
Sep 29 09:39:25 crc kubenswrapper[4779]: E0929 09:39:25.715389 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-f2tkr_openshift-multus(6b0e23f7-a478-48e2-a745-193a90e87553)\"" pod="openshift-multus/multus-f2tkr" podUID="6b0e23f7-a478-48e2-a745-193a90e87553"
Sep 29 09:39:28 crc kubenswrapper[4779]: I0929 09:39:28.929490 4779 scope.go:117] "RemoveContainer" containerID="10bf019bbe72b02d7f7545cd6f01bde45c32ff2222682d62dac1992cfcdca115"
Sep 29 09:39:29 crc kubenswrapper[4779]: I0929 09:39:29.688299 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/2.log"
Sep 29 09:39:36 crc kubenswrapper[4779]: I0929 09:39:36.714695 4779 scope.go:117] "RemoveContainer" containerID="4bb23b9d833a44d610758b9d19cba1ff80274c767464a459a50ef01121718270"
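The "back-off 20s restarting failed container" entry is CrashLoopBackOff: the kubelet delays each restart of a crashing container, doubling the delay from a 10s base up to a 5m cap (those are the documented defaults, assumed here rather than read from this cluster's config; "back-off 20s" would put kube-multus at roughly its second restart, consistent with the ".../kube-multus/2.log" path above). A quick sketch of the progression:

package main

import (
	"fmt"
	"time"
)

// Print the assumed CrashLoopBackOff delay sequence: start at 10s, double on
// each subsequent restart, and clamp at the 5m ceiling.
func main() {
	delay, max := 10*time.Second, 5*time.Minute
	for restart := 1; restart <= 6; restart++ {
		fmt.Printf("restart %d: back-off %s\n", restart, delay)
		delay *= 2
		if delay > max {
			delay = max
		}
	}
}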
"RemoveContainer" containerID="4bb23b9d833a44d610758b9d19cba1ff80274c767464a459a50ef01121718270" Sep 29 09:39:37 crc kubenswrapper[4779]: I0929 09:39:37.737059 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2tkr_6b0e23f7-a478-48e2-a745-193a90e87553/kube-multus/2.log" Sep 29 09:39:37 crc kubenswrapper[4779]: I0929 09:39:37.737442 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2tkr" event={"ID":"6b0e23f7-a478-48e2-a745-193a90e87553","Type":"ContainerStarted","Data":"262628323adfce6d22201f8678571a708be74419666c98563145a3d229e5e0e8"} Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.359727 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw"] Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.361199 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.366697 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.373264 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw"] Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.513006 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.513075 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.513154 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz449\" (UniqueName: \"kubernetes.io/projected/4a2dd05c-f021-48d2-9c7a-092ca80dca01-kube-api-access-rz449\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.614695 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz449\" (UniqueName: \"kubernetes.io/projected/4a2dd05c-f021-48d2-9c7a-092ca80dca01-kube-api-access-rz449\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.615173 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.615383 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.615612 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.615965 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.634422 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz449\" (UniqueName: \"kubernetes.io/projected/4a2dd05c-f021-48d2-9c7a-092ca80dca01-kube-api-access-rz449\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.717129 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:41 crc kubenswrapper[4779]: I0929 09:39:41.944322 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw"] Sep 29 09:39:42 crc kubenswrapper[4779]: I0929 09:39:42.769441 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" event={"ID":"4a2dd05c-f021-48d2-9c7a-092ca80dca01","Type":"ContainerStarted","Data":"9634acf31e908b1f10e5832be6485f7c4429c9ac0b8dc89e99104917bf79a232"} Sep 29 09:39:42 crc kubenswrapper[4779]: I0929 09:39:42.769785 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" event={"ID":"4a2dd05c-f021-48d2-9c7a-092ca80dca01","Type":"ContainerStarted","Data":"d47934a21cdd867b617673b19bd01f4f8ee4f470f5652044fd44c4e59f726f70"} Sep 29 09:39:43 crc kubenswrapper[4779]: I0929 09:39:43.599760 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zw4c8" Sep 29 09:39:43 crc kubenswrapper[4779]: I0929 09:39:43.776432 4779 generic.go:334] "Generic (PLEG): container finished" podID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerID="9634acf31e908b1f10e5832be6485f7c4429c9ac0b8dc89e99104917bf79a232" exitCode=0 Sep 29 09:39:43 crc kubenswrapper[4779]: I0929 09:39:43.776493 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" event={"ID":"4a2dd05c-f021-48d2-9c7a-092ca80dca01","Type":"ContainerDied","Data":"9634acf31e908b1f10e5832be6485f7c4429c9ac0b8dc89e99104917bf79a232"} Sep 29 09:39:45 crc kubenswrapper[4779]: I0929 09:39:45.791218 4779 generic.go:334] "Generic (PLEG): container finished" podID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerID="858dbefa2d371e279c874a65cab9c8554b07dd9761a39e93c376465abc49dd82" exitCode=0 Sep 29 09:39:45 crc kubenswrapper[4779]: I0929 09:39:45.791328 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" event={"ID":"4a2dd05c-f021-48d2-9c7a-092ca80dca01","Type":"ContainerDied","Data":"858dbefa2d371e279c874a65cab9c8554b07dd9761a39e93c376465abc49dd82"} Sep 29 09:39:46 crc kubenswrapper[4779]: I0929 09:39:46.800931 4779 generic.go:334] "Generic (PLEG): container finished" podID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerID="df398f5aac77a8288858c37f2f57bff8e0048d9a059659671617abaa9a9dafed" exitCode=0 Sep 29 09:39:46 crc kubenswrapper[4779]: I0929 09:39:46.801515 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" event={"ID":"4a2dd05c-f021-48d2-9c7a-092ca80dca01","Type":"ContainerDied","Data":"df398f5aac77a8288858c37f2f57bff8e0048d9a059659671617abaa9a9dafed"} Sep 29 09:39:46 crc kubenswrapper[4779]: I0929 09:39:46.966578 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:39:46 crc kubenswrapper[4779]: I0929 09:39:46.966662 4779 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.079681 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.191967 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz449\" (UniqueName: \"kubernetes.io/projected/4a2dd05c-f021-48d2-9c7a-092ca80dca01-kube-api-access-rz449\") pod \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.192034 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-util\") pod \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.192098 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-bundle\") pod \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\" (UID: \"4a2dd05c-f021-48d2-9c7a-092ca80dca01\") " Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.195366 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-bundle" (OuterVolumeSpecName: "bundle") pod "4a2dd05c-f021-48d2-9c7a-092ca80dca01" (UID: "4a2dd05c-f021-48d2-9c7a-092ca80dca01"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.199092 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a2dd05c-f021-48d2-9c7a-092ca80dca01-kube-api-access-rz449" (OuterVolumeSpecName: "kube-api-access-rz449") pod "4a2dd05c-f021-48d2-9c7a-092ca80dca01" (UID: "4a2dd05c-f021-48d2-9c7a-092ca80dca01"). InnerVolumeSpecName "kube-api-access-rz449". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.206619 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-util" (OuterVolumeSpecName: "util") pod "4a2dd05c-f021-48d2-9c7a-092ca80dca01" (UID: "4a2dd05c-f021-48d2-9c7a-092ca80dca01"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.293862 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz449\" (UniqueName: \"kubernetes.io/projected/4a2dd05c-f021-48d2-9c7a-092ca80dca01-kube-api-access-rz449\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.293890 4779 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-util\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.293917 4779 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a2dd05c-f021-48d2-9c7a-092ca80dca01-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.814192 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" event={"ID":"4a2dd05c-f021-48d2-9c7a-092ca80dca01","Type":"ContainerDied","Data":"d47934a21cdd867b617673b19bd01f4f8ee4f470f5652044fd44c4e59f726f70"} Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.814234 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d47934a21cdd867b617673b19bd01f4f8ee4f470f5652044fd44c4e59f726f70" Sep 29 09:39:48 crc kubenswrapper[4779]: I0929 09:39:48.814271 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.281595 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn"] Sep 29 09:39:58 crc kubenswrapper[4779]: E0929 09:39:58.282527 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerName="pull" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.282550 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerName="pull" Sep 29 09:39:58 crc kubenswrapper[4779]: E0929 09:39:58.282563 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerName="extract" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.282573 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerName="extract" Sep 29 09:39:58 crc kubenswrapper[4779]: E0929 09:39:58.282601 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerName="util" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.282614 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerName="util" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.282748 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a2dd05c-f021-48d2-9c7a-092ca80dca01" containerName="extract" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.283264 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.285583 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.285914 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.288458 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-dvkxx" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.295533 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.394911 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.395501 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.397725 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.398114 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-lvw5h" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.413577 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.414330 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.414408 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.422784 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lzn4\" (UniqueName: \"kubernetes.io/projected/31f19cee-69f8-4016-9025-35a73798bc5f-kube-api-access-4lzn4\") pod \"obo-prometheus-operator-7c8cf85677-vgkdn\" (UID: \"31f19cee-69f8-4016-9025-35a73798bc5f\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.444992 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.511105 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-42c95"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.511881 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.514467 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.514603 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-2t9g7" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.523819 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c314497a-d9a7-4364-93cd-924a0b1f2de4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-52x4j\" (UID: \"c314497a-d9a7-4364-93cd-924a0b1f2de4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.523849 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c314497a-d9a7-4364-93cd-924a0b1f2de4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-52x4j\" (UID: \"c314497a-d9a7-4364-93cd-924a0b1f2de4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.523939 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/588bdbef-217b-4bf0-84ba-ae1d0fb8a80b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6\" (UID: \"588bdbef-217b-4bf0-84ba-ae1d0fb8a80b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.523971 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lzn4\" (UniqueName: \"kubernetes.io/projected/31f19cee-69f8-4016-9025-35a73798bc5f-kube-api-access-4lzn4\") pod \"obo-prometheus-operator-7c8cf85677-vgkdn\" (UID: \"31f19cee-69f8-4016-9025-35a73798bc5f\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.523999 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/588bdbef-217b-4bf0-84ba-ae1d0fb8a80b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6\" (UID: \"588bdbef-217b-4bf0-84ba-ae1d0fb8a80b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.524692 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-42c95"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.554652 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lzn4\" (UniqueName: \"kubernetes.io/projected/31f19cee-69f8-4016-9025-35a73798bc5f-kube-api-access-4lzn4\") pod \"obo-prometheus-operator-7c8cf85677-vgkdn\" (UID: \"31f19cee-69f8-4016-9025-35a73798bc5f\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.595890 4779 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operators/perses-operator-54bc95c9fb-9s4b8"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.596506 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.597053 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.601457 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-d8ztz" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.611130 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-9s4b8"] Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.625503 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/588bdbef-217b-4bf0-84ba-ae1d0fb8a80b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6\" (UID: \"588bdbef-217b-4bf0-84ba-ae1d0fb8a80b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.625559 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smmgk\" (UniqueName: \"kubernetes.io/projected/d21259d9-d1c5-4d70-815d-045f775f09bc-kube-api-access-smmgk\") pod \"observability-operator-cc5f78dfc-42c95\" (UID: \"d21259d9-d1c5-4d70-815d-045f775f09bc\") " pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.625599 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/588bdbef-217b-4bf0-84ba-ae1d0fb8a80b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6\" (UID: \"588bdbef-217b-4bf0-84ba-ae1d0fb8a80b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.625622 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d21259d9-d1c5-4d70-815d-045f775f09bc-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-42c95\" (UID: \"d21259d9-d1c5-4d70-815d-045f775f09bc\") " pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.625651 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c314497a-d9a7-4364-93cd-924a0b1f2de4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-52x4j\" (UID: \"c314497a-d9a7-4364-93cd-924a0b1f2de4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.625670 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c314497a-d9a7-4364-93cd-924a0b1f2de4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-52x4j\" (UID: \"c314497a-d9a7-4364-93cd-924a0b1f2de4\") " 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.631304 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/588bdbef-217b-4bf0-84ba-ae1d0fb8a80b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6\" (UID: \"588bdbef-217b-4bf0-84ba-ae1d0fb8a80b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.631628 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c314497a-d9a7-4364-93cd-924a0b1f2de4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-52x4j\" (UID: \"c314497a-d9a7-4364-93cd-924a0b1f2de4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.632254 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c314497a-d9a7-4364-93cd-924a0b1f2de4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-52x4j\" (UID: \"c314497a-d9a7-4364-93cd-924a0b1f2de4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.632659 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/588bdbef-217b-4bf0-84ba-ae1d0fb8a80b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6\" (UID: \"588bdbef-217b-4bf0-84ba-ae1d0fb8a80b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.707192 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.729442 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/030213d9-0575-48e7-b472-dd72d07ecbc8-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-9s4b8\" (UID: \"030213d9-0575-48e7-b472-dd72d07ecbc8\") " pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.729505 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtrnf\" (UniqueName: \"kubernetes.io/projected/030213d9-0575-48e7-b472-dd72d07ecbc8-kube-api-access-vtrnf\") pod \"perses-operator-54bc95c9fb-9s4b8\" (UID: \"030213d9-0575-48e7-b472-dd72d07ecbc8\") " pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.729557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smmgk\" (UniqueName: \"kubernetes.io/projected/d21259d9-d1c5-4d70-815d-045f775f09bc-kube-api-access-smmgk\") pod \"observability-operator-cc5f78dfc-42c95\" (UID: \"d21259d9-d1c5-4d70-815d-045f775f09bc\") " pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.729585 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d21259d9-d1c5-4d70-815d-045f775f09bc-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-42c95\" (UID: \"d21259d9-d1c5-4d70-815d-045f775f09bc\") " pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.730436 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.734923 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d21259d9-d1c5-4d70-815d-045f775f09bc-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-42c95\" (UID: \"d21259d9-d1c5-4d70-815d-045f775f09bc\") " pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.758857 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smmgk\" (UniqueName: \"kubernetes.io/projected/d21259d9-d1c5-4d70-815d-045f775f09bc-kube-api-access-smmgk\") pod \"observability-operator-cc5f78dfc-42c95\" (UID: \"d21259d9-d1c5-4d70-815d-045f775f09bc\") " pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.827310 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.832312 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtrnf\" (UniqueName: \"kubernetes.io/projected/030213d9-0575-48e7-b472-dd72d07ecbc8-kube-api-access-vtrnf\") pod \"perses-operator-54bc95c9fb-9s4b8\" (UID: \"030213d9-0575-48e7-b472-dd72d07ecbc8\") " pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.832446 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/030213d9-0575-48e7-b472-dd72d07ecbc8-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-9s4b8\" (UID: \"030213d9-0575-48e7-b472-dd72d07ecbc8\") " pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.833184 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/030213d9-0575-48e7-b472-dd72d07ecbc8-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-9s4b8\" (UID: \"030213d9-0575-48e7-b472-dd72d07ecbc8\") " pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.863994 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtrnf\" (UniqueName: \"kubernetes.io/projected/030213d9-0575-48e7-b472-dd72d07ecbc8-kube-api-access-vtrnf\") pod \"perses-operator-54bc95c9fb-9s4b8\" (UID: \"030213d9-0575-48e7-b472-dd72d07ecbc8\") " pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.967854 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:39:58 crc kubenswrapper[4779]: I0929 09:39:58.983089 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6"] Sep 29 09:39:59 crc kubenswrapper[4779]: W0929 09:39:59.000595 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod588bdbef_217b_4bf0_84ba_ae1d0fb8a80b.slice/crio-3ca446b7f77c5ed49a31ec904d5b5db89ea59b229505dde1968189efc134fd16 WatchSource:0}: Error finding container 3ca446b7f77c5ed49a31ec904d5b5db89ea59b229505dde1968189efc134fd16: Status 404 returned error can't find the container with id 3ca446b7f77c5ed49a31ec904d5b5db89ea59b229505dde1968189efc134fd16 Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.083051 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j"] Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.123133 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn"] Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.319121 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-42c95"] Sep 29 09:39:59 crc kubenswrapper[4779]: W0929 09:39:59.323697 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd21259d9_d1c5_4d70_815d_045f775f09bc.slice/crio-657d7052c22d8d350e704b3f68ff91ca78d5cc8fe1fd0a2b763ec694c90367f8 WatchSource:0}: Error finding container 657d7052c22d8d350e704b3f68ff91ca78d5cc8fe1fd0a2b763ec694c90367f8: Status 404 returned error can't find the container with id 657d7052c22d8d350e704b3f68ff91ca78d5cc8fe1fd0a2b763ec694c90367f8 Sep 29 09:39:59 crc kubenswrapper[4779]: W0929 09:39:59.404775 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod030213d9_0575_48e7_b472_dd72d07ecbc8.slice/crio-abbc1bcb826a6a669e3287744ed54bc75e0e10b42214327a1527da7d76732c00 WatchSource:0}: Error finding container abbc1bcb826a6a669e3287744ed54bc75e0e10b42214327a1527da7d76732c00: Status 404 returned error can't find the container with id abbc1bcb826a6a669e3287744ed54bc75e0e10b42214327a1527da7d76732c00 Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.405106 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-9s4b8"] Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.874766 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" event={"ID":"c314497a-d9a7-4364-93cd-924a0b1f2de4","Type":"ContainerStarted","Data":"5db2c3b3d42fb04d6fc847799914fc22a67c4c976344949d4df43d47595fbfe5"} Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.875788 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" event={"ID":"588bdbef-217b-4bf0-84ba-ae1d0fb8a80b","Type":"ContainerStarted","Data":"3ca446b7f77c5ed49a31ec904d5b5db89ea59b229505dde1968189efc134fd16"} Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.876552 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" 
event={"ID":"030213d9-0575-48e7-b472-dd72d07ecbc8","Type":"ContainerStarted","Data":"abbc1bcb826a6a669e3287744ed54bc75e0e10b42214327a1527da7d76732c00"} Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.877404 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-42c95" event={"ID":"d21259d9-d1c5-4d70-815d-045f775f09bc","Type":"ContainerStarted","Data":"657d7052c22d8d350e704b3f68ff91ca78d5cc8fe1fd0a2b763ec694c90367f8"} Sep 29 09:39:59 crc kubenswrapper[4779]: I0929 09:39:59.878221 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn" event={"ID":"31f19cee-69f8-4016-9025-35a73798bc5f","Type":"ContainerStarted","Data":"7931f445b15fdbb47f49a37a1d73a853a2e15525c22fd2040d390f95eb322e9c"} Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.980726 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" event={"ID":"c314497a-d9a7-4364-93cd-924a0b1f2de4","Type":"ContainerStarted","Data":"de7cae9d350e420ece93b8caae9fe6adf7193505c1ec4cfdd5d93ff4eaddec44"} Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.982657 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" event={"ID":"588bdbef-217b-4bf0-84ba-ae1d0fb8a80b","Type":"ContainerStarted","Data":"f582f8a38f9b012403a93fec1dd3c8e3a44753e18188d76605ddc4db92f78b81"} Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.984382 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-42c95" event={"ID":"d21259d9-d1c5-4d70-815d-045f775f09bc","Type":"ContainerStarted","Data":"491f54a2a3ca1de4bd36ccb6afcc26d307922d84dc445d4f391e25c4fd8a7a79"} Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.984598 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.985893 4779 patch_prober.go:28] interesting pod/observability-operator-cc5f78dfc-42c95 container/operator namespace/openshift-operators: Readiness probe status=failure output="Get \"http://10.217.0.44:8081/healthz\": dial tcp 10.217.0.44:8081: connect: connection refused" start-of-body= Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.985949 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operators/observability-operator-cc5f78dfc-42c95" podUID="d21259d9-d1c5-4d70-815d-045f775f09bc" containerName="operator" probeResult="failure" output="Get \"http://10.217.0.44:8081/healthz\": dial tcp 10.217.0.44:8081: connect: connection refused" Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.986122 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" event={"ID":"030213d9-0575-48e7-b472-dd72d07ecbc8","Type":"ContainerStarted","Data":"c4fafa9591a65f3b8cc039ad4281f9c1fd902fe21ef649e8fbabd53bcf3ba2a1"} Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.986297 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:40:10 crc kubenswrapper[4779]: I0929 09:40:10.987807 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn" 
event={"ID":"31f19cee-69f8-4016-9025-35a73798bc5f","Type":"ContainerStarted","Data":"80d964d830ed1535e91ca6fa92836343da9aa52bfc7d461f768648f57bb355fe"} Sep 29 09:40:11 crc kubenswrapper[4779]: I0929 09:40:11.000288 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-52x4j" podStartSLOduration=1.5190591580000001 podStartE2EDuration="13.000270968s" podCreationTimestamp="2025-09-29 09:39:58 +0000 UTC" firstStartedPulling="2025-09-29 09:39:59.115373916 +0000 UTC m=+631.096697820" lastFinishedPulling="2025-09-29 09:40:10.596585706 +0000 UTC m=+642.577909630" observedRunningTime="2025-09-29 09:40:10.999272228 +0000 UTC m=+642.980596142" watchObservedRunningTime="2025-09-29 09:40:11.000270968 +0000 UTC m=+642.981594882" Sep 29 09:40:11 crc kubenswrapper[4779]: I0929 09:40:11.031419 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6" podStartSLOduration=1.399758072 podStartE2EDuration="13.031401288s" podCreationTimestamp="2025-09-29 09:39:58 +0000 UTC" firstStartedPulling="2025-09-29 09:39:59.003832669 +0000 UTC m=+630.985156573" lastFinishedPulling="2025-09-29 09:40:10.635475885 +0000 UTC m=+642.616799789" observedRunningTime="2025-09-29 09:40:11.03044552 +0000 UTC m=+643.011769424" watchObservedRunningTime="2025-09-29 09:40:11.031401288 +0000 UTC m=+643.012725192" Sep 29 09:40:11 crc kubenswrapper[4779]: I0929 09:40:11.057527 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" podStartSLOduration=1.8289110069999999 podStartE2EDuration="13.05750796s" podCreationTimestamp="2025-09-29 09:39:58 +0000 UTC" firstStartedPulling="2025-09-29 09:39:59.407538592 +0000 UTC m=+631.388862496" lastFinishedPulling="2025-09-29 09:40:10.636135545 +0000 UTC m=+642.617459449" observedRunningTime="2025-09-29 09:40:11.055410768 +0000 UTC m=+643.036734672" watchObservedRunningTime="2025-09-29 09:40:11.05750796 +0000 UTC m=+643.038831864" Sep 29 09:40:11 crc kubenswrapper[4779]: I0929 09:40:11.081518 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-42c95" podStartSLOduration=1.744798211 podStartE2EDuration="13.081502339s" podCreationTimestamp="2025-09-29 09:39:58 +0000 UTC" firstStartedPulling="2025-09-29 09:39:59.325764525 +0000 UTC m=+631.307088429" lastFinishedPulling="2025-09-29 09:40:10.662468663 +0000 UTC m=+642.643792557" observedRunningTime="2025-09-29 09:40:11.079025246 +0000 UTC m=+643.060349150" watchObservedRunningTime="2025-09-29 09:40:11.081502339 +0000 UTC m=+643.062826243" Sep 29 09:40:11 crc kubenswrapper[4779]: I0929 09:40:11.095649 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-vgkdn" podStartSLOduration=1.586125001 podStartE2EDuration="13.095633567s" podCreationTimestamp="2025-09-29 09:39:58 +0000 UTC" firstStartedPulling="2025-09-29 09:39:59.149871386 +0000 UTC m=+631.131195290" lastFinishedPulling="2025-09-29 09:40:10.659379952 +0000 UTC m=+642.640703856" observedRunningTime="2025-09-29 09:40:11.093598666 +0000 UTC m=+643.074922610" watchObservedRunningTime="2025-09-29 09:40:11.095633567 +0000 UTC m=+643.076957471" Sep 29 09:40:11 crc kubenswrapper[4779]: I0929 09:40:11.995091 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-operators/observability-operator-cc5f78dfc-42c95" Sep 29 09:40:16 crc kubenswrapper[4779]: I0929 09:40:16.966419 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:40:16 crc kubenswrapper[4779]: I0929 09:40:16.966719 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:40:16 crc kubenswrapper[4779]: I0929 09:40:16.966761 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:40:16 crc kubenswrapper[4779]: I0929 09:40:16.967309 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7390d6cd5471c2ba2b59c4030a2423a19ab8e39bfa71091617e29773167c149f"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 09:40:16 crc kubenswrapper[4779]: I0929 09:40:16.967356 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://7390d6cd5471c2ba2b59c4030a2423a19ab8e39bfa71091617e29773167c149f" gracePeriod=600 Sep 29 09:40:18 crc kubenswrapper[4779]: I0929 09:40:18.033475 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="7390d6cd5471c2ba2b59c4030a2423a19ab8e39bfa71091617e29773167c149f" exitCode=0 Sep 29 09:40:18 crc kubenswrapper[4779]: I0929 09:40:18.033515 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"7390d6cd5471c2ba2b59c4030a2423a19ab8e39bfa71091617e29773167c149f"} Sep 29 09:40:18 crc kubenswrapper[4779]: I0929 09:40:18.034955 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"0e0bb92440d884ad90defff16322d948e1df51b9f7349061e9da58a7e515a610"} Sep 29 09:40:18 crc kubenswrapper[4779]: I0929 09:40:18.035022 4779 scope.go:117] "RemoveContainer" containerID="fce338249e8d781ebcc8dd4226aa44e91a894c7151c8ca6d0b4f7848ae00e827" Sep 29 09:40:18 crc kubenswrapper[4779]: I0929 09:40:18.971863 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-9s4b8" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.296154 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl"] Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.297875 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.299824 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.339026 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl"] Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.416102 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.416215 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.416319 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg5fc\" (UniqueName: \"kubernetes.io/projected/2b72d279-81fc-4298-bf86-21cdd90a2bb0-kube-api-access-qg5fc\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.517580 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg5fc\" (UniqueName: \"kubernetes.io/projected/2b72d279-81fc-4298-bf86-21cdd90a2bb0-kube-api-access-qg5fc\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.517684 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.517706 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.518183 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.518261 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.537962 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg5fc\" (UniqueName: \"kubernetes.io/projected/2b72d279-81fc-4298-bf86-21cdd90a2bb0-kube-api-access-qg5fc\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:36 crc kubenswrapper[4779]: I0929 09:40:36.616456 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:37 crc kubenswrapper[4779]: I0929 09:40:37.077130 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl"] Sep 29 09:40:37 crc kubenswrapper[4779]: I0929 09:40:37.150941 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" event={"ID":"2b72d279-81fc-4298-bf86-21cdd90a2bb0","Type":"ContainerStarted","Data":"8ed81eca4964409de0074c7b8634a70d2c6f507a8c3c3f0a7b6cdbcc9c76d6f3"} Sep 29 09:40:38 crc kubenswrapper[4779]: I0929 09:40:38.156912 4779 generic.go:334] "Generic (PLEG): container finished" podID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerID="b57fcfeeb6a74ddc8c0744bb6c5e6c3640bcd15f40cace478f32ebfd2d55ad90" exitCode=0 Sep 29 09:40:38 crc kubenswrapper[4779]: I0929 09:40:38.156969 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" event={"ID":"2b72d279-81fc-4298-bf86-21cdd90a2bb0","Type":"ContainerDied","Data":"b57fcfeeb6a74ddc8c0744bb6c5e6c3640bcd15f40cace478f32ebfd2d55ad90"} Sep 29 09:40:41 crc kubenswrapper[4779]: I0929 09:40:41.183439 4779 generic.go:334] "Generic (PLEG): container finished" podID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerID="1b0d964fc4515db7631fa4f6aefc13a804a30588225aceaa6f0d626b51337fde" exitCode=0 Sep 29 09:40:41 crc kubenswrapper[4779]: I0929 09:40:41.183498 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" event={"ID":"2b72d279-81fc-4298-bf86-21cdd90a2bb0","Type":"ContainerDied","Data":"1b0d964fc4515db7631fa4f6aefc13a804a30588225aceaa6f0d626b51337fde"} Sep 29 09:40:42 crc kubenswrapper[4779]: I0929 09:40:42.192094 4779 generic.go:334] "Generic (PLEG): container finished" podID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerID="acf7b3f5480dd2e095f54cc54755e4eb9db5320062b1fae92220b03918c17f42" exitCode=0 Sep 29 09:40:42 crc kubenswrapper[4779]: I0929 
09:40:42.192185 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" event={"ID":"2b72d279-81fc-4298-bf86-21cdd90a2bb0","Type":"ContainerDied","Data":"acf7b3f5480dd2e095f54cc54755e4eb9db5320062b1fae92220b03918c17f42"} Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.490079 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.499111 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5fc\" (UniqueName: \"kubernetes.io/projected/2b72d279-81fc-4298-bf86-21cdd90a2bb0-kube-api-access-qg5fc\") pod \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.499250 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-util\") pod \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.499302 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-bundle\") pod \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\" (UID: \"2b72d279-81fc-4298-bf86-21cdd90a2bb0\") " Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.500193 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-bundle" (OuterVolumeSpecName: "bundle") pod "2b72d279-81fc-4298-bf86-21cdd90a2bb0" (UID: "2b72d279-81fc-4298-bf86-21cdd90a2bb0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.506336 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b72d279-81fc-4298-bf86-21cdd90a2bb0-kube-api-access-qg5fc" (OuterVolumeSpecName: "kube-api-access-qg5fc") pod "2b72d279-81fc-4298-bf86-21cdd90a2bb0" (UID: "2b72d279-81fc-4298-bf86-21cdd90a2bb0"). InnerVolumeSpecName "kube-api-access-qg5fc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.535596 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-util" (OuterVolumeSpecName: "util") pod "2b72d279-81fc-4298-bf86-21cdd90a2bb0" (UID: "2b72d279-81fc-4298-bf86-21cdd90a2bb0"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.600624 4779 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-util\") on node \"crc\" DevicePath \"\"" Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.600662 4779 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2b72d279-81fc-4298-bf86-21cdd90a2bb0-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:40:43 crc kubenswrapper[4779]: I0929 09:40:43.600671 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5fc\" (UniqueName: \"kubernetes.io/projected/2b72d279-81fc-4298-bf86-21cdd90a2bb0-kube-api-access-qg5fc\") on node \"crc\" DevicePath \"\"" Sep 29 09:40:44 crc kubenswrapper[4779]: I0929 09:40:44.206801 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" event={"ID":"2b72d279-81fc-4298-bf86-21cdd90a2bb0","Type":"ContainerDied","Data":"8ed81eca4964409de0074c7b8634a70d2c6f507a8c3c3f0a7b6cdbcc9c76d6f3"} Sep 29 09:40:44 crc kubenswrapper[4779]: I0929 09:40:44.206844 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ed81eca4964409de0074c7b8634a70d2c6f507a8c3c3f0a7b6cdbcc9c76d6f3" Sep 29 09:40:44 crc kubenswrapper[4779]: I0929 09:40:44.206926 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.842946 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp"] Sep 29 09:40:47 crc kubenswrapper[4779]: E0929 09:40:47.843762 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerName="pull" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.843779 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerName="pull" Sep 29 09:40:47 crc kubenswrapper[4779]: E0929 09:40:47.843794 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerName="extract" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.843802 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerName="extract" Sep 29 09:40:47 crc kubenswrapper[4779]: E0929 09:40:47.843812 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerName="util" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.843819 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerName="util" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.843950 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b72d279-81fc-4298-bf86-21cdd90a2bb0" containerName="extract" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.844425 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.846706 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.846942 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.847284 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-qwmw2" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.852835 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp"] Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.856925 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52wvx\" (UniqueName: \"kubernetes.io/projected/91fe307a-bb82-495b-877d-d02d46ddf9b0-kube-api-access-52wvx\") pod \"nmstate-operator-5d6f6cfd66-54tvp\" (UID: \"91fe307a-bb82-495b-877d-d02d46ddf9b0\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.958320 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52wvx\" (UniqueName: \"kubernetes.io/projected/91fe307a-bb82-495b-877d-d02d46ddf9b0-kube-api-access-52wvx\") pod \"nmstate-operator-5d6f6cfd66-54tvp\" (UID: \"91fe307a-bb82-495b-877d-d02d46ddf9b0\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp" Sep 29 09:40:47 crc kubenswrapper[4779]: I0929 09:40:47.982131 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52wvx\" (UniqueName: \"kubernetes.io/projected/91fe307a-bb82-495b-877d-d02d46ddf9b0-kube-api-access-52wvx\") pod \"nmstate-operator-5d6f6cfd66-54tvp\" (UID: \"91fe307a-bb82-495b-877d-d02d46ddf9b0\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp" Sep 29 09:40:48 crc kubenswrapper[4779]: I0929 09:40:48.157589 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp" Sep 29 09:40:48 crc kubenswrapper[4779]: I0929 09:40:48.681881 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp"] Sep 29 09:40:49 crc kubenswrapper[4779]: I0929 09:40:49.233887 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp" event={"ID":"91fe307a-bb82-495b-877d-d02d46ddf9b0","Type":"ContainerStarted","Data":"8cafaacdfec01123116e8cf78c01a1f3c048ee52cbbb0246caf62ea60ea76bd5"} Sep 29 09:40:51 crc kubenswrapper[4779]: I0929 09:40:51.247725 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp" event={"ID":"91fe307a-bb82-495b-877d-d02d46ddf9b0","Type":"ContainerStarted","Data":"9f03a9d9f01fea08a5ac6e1c6042abfe9c8b2d48cd70cf919559c2a2dca301b9"} Sep 29 09:40:51 crc kubenswrapper[4779]: I0929 09:40:51.264850 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-54tvp" podStartSLOduration=2.394519774 podStartE2EDuration="4.264831208s" podCreationTimestamp="2025-09-29 09:40:47 +0000 UTC" firstStartedPulling="2025-09-29 09:40:48.693713659 +0000 UTC m=+680.675037563" lastFinishedPulling="2025-09-29 09:40:50.564025093 +0000 UTC m=+682.545348997" observedRunningTime="2025-09-29 09:40:51.262517222 +0000 UTC m=+683.243841126" watchObservedRunningTime="2025-09-29 09:40:51.264831208 +0000 UTC m=+683.246155112" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.582228 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk"] Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.583954 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.586309 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-kxtvm" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.607103 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk"] Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.618894 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-w26jk"] Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.620041 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.630986 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-k8phl"] Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.632405 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.635191 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.670349 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-k8phl"] Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.684096 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fvbr\" (UniqueName: \"kubernetes.io/projected/885dfe51-81d6-456c-b9df-ca6983913dd6-kube-api-access-5fvbr\") pod \"nmstate-metrics-58fcddf996-mfvhk\" (UID: \"885dfe51-81d6-456c-b9df-ca6983913dd6\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.684138 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-ovs-socket\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.684191 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b85e4091-5a43-4976-adb2-b9af4e2cdd06-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-k8phl\" (UID: \"b85e4091-5a43-4976-adb2-b9af4e2cdd06\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.684206 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-dbus-socket\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.684245 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmx64\" (UniqueName: \"kubernetes.io/projected/4586a430-c13c-40b4-997f-2999ffbe07df-kube-api-access-hmx64\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.684275 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ffcj\" (UniqueName: \"kubernetes.io/projected/b85e4091-5a43-4976-adb2-b9af4e2cdd06-kube-api-access-6ffcj\") pod \"nmstate-webhook-6d689559c5-k8phl\" (UID: \"b85e4091-5a43-4976-adb2-b9af4e2cdd06\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.684305 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-nmstate-lock\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.734037 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5"] Sep 29 09:40:57 crc kubenswrapper[4779]: 
I0929 09:40:57.734707 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.736712 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-b4hf7" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.736890 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.737052 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.749981 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5"] Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785338 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmx64\" (UniqueName: \"kubernetes.io/projected/4586a430-c13c-40b4-997f-2999ffbe07df-kube-api-access-hmx64\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785532 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785569 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ffcj\" (UniqueName: \"kubernetes.io/projected/b85e4091-5a43-4976-adb2-b9af4e2cdd06-kube-api-access-6ffcj\") pod \"nmstate-webhook-6d689559c5-k8phl\" (UID: \"b85e4091-5a43-4976-adb2-b9af4e2cdd06\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785614 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-nmstate-lock\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785656 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fvbr\" (UniqueName: \"kubernetes.io/projected/885dfe51-81d6-456c-b9df-ca6983913dd6-kube-api-access-5fvbr\") pod \"nmstate-metrics-58fcddf996-mfvhk\" (UID: \"885dfe51-81d6-456c-b9df-ca6983913dd6\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785678 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-ovs-socket\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785727 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b85e4091-5a43-4976-adb2-b9af4e2cdd06-tls-key-pair\") pod 
\"nmstate-webhook-6d689559c5-k8phl\" (UID: \"b85e4091-5a43-4976-adb2-b9af4e2cdd06\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785755 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785782 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-dbus-socket\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.785822 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk2tq\" (UniqueName: \"kubernetes.io/projected/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-kube-api-access-jk2tq\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.786111 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-nmstate-lock\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.786118 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-ovs-socket\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.786353 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4586a430-c13c-40b4-997f-2999ffbe07df-dbus-socket\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.790880 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b85e4091-5a43-4976-adb2-b9af4e2cdd06-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-k8phl\" (UID: \"b85e4091-5a43-4976-adb2-b9af4e2cdd06\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.802770 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmx64\" (UniqueName: \"kubernetes.io/projected/4586a430-c13c-40b4-997f-2999ffbe07df-kube-api-access-hmx64\") pod \"nmstate-handler-w26jk\" (UID: \"4586a430-c13c-40b4-997f-2999ffbe07df\") " pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.804759 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ffcj\" (UniqueName: \"kubernetes.io/projected/b85e4091-5a43-4976-adb2-b9af4e2cdd06-kube-api-access-6ffcj\") 
pod \"nmstate-webhook-6d689559c5-k8phl\" (UID: \"b85e4091-5a43-4976-adb2-b9af4e2cdd06\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.805467 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fvbr\" (UniqueName: \"kubernetes.io/projected/885dfe51-81d6-456c-b9df-ca6983913dd6-kube-api-access-5fvbr\") pod \"nmstate-metrics-58fcddf996-mfvhk\" (UID: \"885dfe51-81d6-456c-b9df-ca6983913dd6\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.887037 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.887320 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.887342 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk2tq\" (UniqueName: \"kubernetes.io/projected/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-kube-api-access-jk2tq\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: E0929 09:40:57.887206 4779 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 29 09:40:57 crc kubenswrapper[4779]: E0929 09:40:57.887437 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-plugin-serving-cert podName:9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89 nodeName:}" failed. No retries permitted until 2025-09-29 09:40:58.387415037 +0000 UTC m=+690.368738951 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-bdlk5" (UID: "9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89") : secret "plugin-serving-cert" not found Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.888240 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.908327 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk2tq\" (UniqueName: \"kubernetes.io/projected/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-kube-api-access-jk2tq\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.918613 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.922131 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-595988c68d-2g4sp"] Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.923015 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.938616 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-595988c68d-2g4sp"] Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.944019 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.962556 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.988533 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-oauth-serving-cert\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.988612 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-service-ca\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.988697 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-config\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.988758 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-oauth-config\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.988787 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-serving-cert\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.988851 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd7xp\" (UniqueName: \"kubernetes.io/projected/2b112f1f-4f29-4987-a78e-e6c0934f6876-kube-api-access-dd7xp\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:57 crc kubenswrapper[4779]: I0929 09:40:57.988996 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-trusted-ca-bundle\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.090582 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-oauth-config\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.090979 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-serving-cert\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.091003 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd7xp\" (UniqueName: \"kubernetes.io/projected/2b112f1f-4f29-4987-a78e-e6c0934f6876-kube-api-access-dd7xp\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.091079 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-trusted-ca-bundle\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.091104 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-oauth-serving-cert\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.091130 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-service-ca\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.091149 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-config\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.093476 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-config\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.093676 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-oauth-serving-cert\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.094387 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-service-ca\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.094961 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/2b112f1f-4f29-4987-a78e-e6c0934f6876-trusted-ca-bundle\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.099862 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-serving-cert\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.102562 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2b112f1f-4f29-4987-a78e-e6c0934f6876-console-oauth-config\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.118029 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd7xp\" (UniqueName: \"kubernetes.io/projected/2b112f1f-4f29-4987-a78e-e6c0934f6876-kube-api-access-dd7xp\") pod \"console-595988c68d-2g4sp\" (UID: \"2b112f1f-4f29-4987-a78e-e6c0934f6876\") " pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.185117 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk"] Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.275435 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.293096 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-w26jk" event={"ID":"4586a430-c13c-40b4-997f-2999ffbe07df","Type":"ContainerStarted","Data":"2c7bf60855db7b34a762c06d9c569e76426c3956a6e9afc6751b2ded0033ba9d"} Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.293992 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" event={"ID":"885dfe51-81d6-456c-b9df-ca6983913dd6","Type":"ContainerStarted","Data":"2b619422869907309068df95fa875c646db888776888817101e9e3d75b13165b"} Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.394200 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.400586 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-bdlk5\" (UID: \"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.419702 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-k8phl"] Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.662537 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.685664 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-595988c68d-2g4sp"] Sep 29 09:40:58 crc kubenswrapper[4779]: W0929 09:40:58.690778 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b112f1f_4f29_4987_a78e_e6c0934f6876.slice/crio-235f73e7cb659d438abfa2d62c9aa432467afb115824d2defeea0c6e417f6442 WatchSource:0}: Error finding container 235f73e7cb659d438abfa2d62c9aa432467afb115824d2defeea0c6e417f6442: Status 404 returned error can't find the container with id 235f73e7cb659d438abfa2d62c9aa432467afb115824d2defeea0c6e417f6442 Sep 29 09:40:58 crc kubenswrapper[4779]: I0929 09:40:58.875285 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5"] Sep 29 09:40:58 crc kubenswrapper[4779]: W0929 09:40:58.881199 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a02daad_b7b9_4bc9_be5d_0a7e0c6d6c89.slice/crio-9bdcb60d2fa9be22e2f77b8c377147aa81e5547f234d7c07a38594610c91a488 WatchSource:0}: Error finding container 9bdcb60d2fa9be22e2f77b8c377147aa81e5547f234d7c07a38594610c91a488: Status 404 returned error can't find the container with id 9bdcb60d2fa9be22e2f77b8c377147aa81e5547f234d7c07a38594610c91a488 Sep 29 09:40:59 crc kubenswrapper[4779]: I0929 09:40:59.304403 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" event={"ID":"b85e4091-5a43-4976-adb2-b9af4e2cdd06","Type":"ContainerStarted","Data":"ffca80e39290a77cedbcb32e22c39dbc77763f639f3b77fe65ca59ae3e2ac766"} Sep 29 09:40:59 crc kubenswrapper[4779]: I0929 09:40:59.305592 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" event={"ID":"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89","Type":"ContainerStarted","Data":"9bdcb60d2fa9be22e2f77b8c377147aa81e5547f234d7c07a38594610c91a488"} Sep 29 09:40:59 crc kubenswrapper[4779]: I0929 09:40:59.307159 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-595988c68d-2g4sp" event={"ID":"2b112f1f-4f29-4987-a78e-e6c0934f6876","Type":"ContainerStarted","Data":"e270a4d40e51da10f02d0fc8a75cbfe7ab6a2a59539b3c6d16a3ed8ab1aeed3d"} Sep 29 09:40:59 crc kubenswrapper[4779]: I0929 09:40:59.307189 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-595988c68d-2g4sp" event={"ID":"2b112f1f-4f29-4987-a78e-e6c0934f6876","Type":"ContainerStarted","Data":"235f73e7cb659d438abfa2d62c9aa432467afb115824d2defeea0c6e417f6442"} Sep 29 09:40:59 crc kubenswrapper[4779]: I0929 09:40:59.327019 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-595988c68d-2g4sp" podStartSLOduration=2.326998357 podStartE2EDuration="2.326998357s" podCreationTimestamp="2025-09-29 09:40:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:40:59.322974313 +0000 UTC m=+691.304298247" watchObservedRunningTime="2025-09-29 09:40:59.326998357 +0000 UTC m=+691.308322261" Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.327168 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" event={"ID":"885dfe51-81d6-456c-b9df-ca6983913dd6","Type":"ContainerStarted","Data":"62cdc096ba8c0f8a05952d9dddf44ba3b72e5facf4a5c4abc7fb4bacb502260a"} Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.328580 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" event={"ID":"b85e4091-5a43-4976-adb2-b9af4e2cdd06","Type":"ContainerStarted","Data":"684feede5740f9d40e3b08986d207c01668f60c15278b5f4dd15bf4fd75d7e68"} Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.329575 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.332573 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" event={"ID":"9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89","Type":"ContainerStarted","Data":"9d2b4f17ea645647d9ad26913b166550969a5f3123cf22cf26e03d44b83824d4"} Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.334790 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-w26jk" event={"ID":"4586a430-c13c-40b4-997f-2999ffbe07df","Type":"ContainerStarted","Data":"d84006f0e183fab9880a9df9fdf9b225e60314cbdbfcef62b7e4e81a368905e8"} Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.335287 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.349132 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" podStartSLOduration=2.417734426 podStartE2EDuration="4.349112851s" podCreationTimestamp="2025-09-29 09:40:57 +0000 UTC" firstStartedPulling="2025-09-29 09:40:58.437849573 +0000 UTC m=+690.419173477" lastFinishedPulling="2025-09-29 09:41:00.369227998 +0000 UTC m=+692.350551902" observedRunningTime="2025-09-29 09:41:01.34348067 +0000 UTC m=+693.324804584" watchObservedRunningTime="2025-09-29 09:41:01.349112851 +0000 UTC m=+693.330436755" Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.363701 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-bdlk5" podStartSLOduration=2.103163375 podStartE2EDuration="4.363676216s" podCreationTimestamp="2025-09-29 09:40:57 +0000 UTC" firstStartedPulling="2025-09-29 09:40:58.885498878 +0000 UTC m=+690.866822782" lastFinishedPulling="2025-09-29 09:41:01.146011719 +0000 UTC m=+693.127335623" observedRunningTime="2025-09-29 09:41:01.357715566 +0000 UTC m=+693.339039480" watchObservedRunningTime="2025-09-29 09:41:01.363676216 +0000 UTC m=+693.345000120" Sep 29 09:41:01 crc kubenswrapper[4779]: I0929 09:41:01.374345 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-w26jk" podStartSLOduration=2.027294152 podStartE2EDuration="4.374325829s" podCreationTimestamp="2025-09-29 09:40:57 +0000 UTC" firstStartedPulling="2025-09-29 09:40:58.003099246 +0000 UTC m=+689.984423150" lastFinishedPulling="2025-09-29 09:41:00.350130923 +0000 UTC m=+692.331454827" observedRunningTime="2025-09-29 09:41:01.373202537 +0000 UTC m=+693.354526461" watchObservedRunningTime="2025-09-29 09:41:01.374325829 +0000 UTC m=+693.355649733" Sep 29 09:41:05 crc kubenswrapper[4779]: I0929 09:41:05.360213 4779 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" event={"ID":"885dfe51-81d6-456c-b9df-ca6983913dd6","Type":"ContainerStarted","Data":"427c6e0d08a5d0b966e2ba86af1b1486e0e3280bbe18a7bb992f545e85903826"} Sep 29 09:41:05 crc kubenswrapper[4779]: I0929 09:41:05.379865 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-mfvhk" podStartSLOduration=1.8065273469999998 podStartE2EDuration="8.379848491s" podCreationTimestamp="2025-09-29 09:40:57 +0000 UTC" firstStartedPulling="2025-09-29 09:40:58.193170456 +0000 UTC m=+690.174494360" lastFinishedPulling="2025-09-29 09:41:04.7664916 +0000 UTC m=+696.747815504" observedRunningTime="2025-09-29 09:41:05.377240347 +0000 UTC m=+697.358564261" watchObservedRunningTime="2025-09-29 09:41:05.379848491 +0000 UTC m=+697.361172405" Sep 29 09:41:07 crc kubenswrapper[4779]: I0929 09:41:07.983984 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-w26jk" Sep 29 09:41:08 crc kubenswrapper[4779]: I0929 09:41:08.276476 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:41:08 crc kubenswrapper[4779]: I0929 09:41:08.278981 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:41:08 crc kubenswrapper[4779]: I0929 09:41:08.285675 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:41:08 crc kubenswrapper[4779]: I0929 09:41:08.383496 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-595988c68d-2g4sp" Sep 29 09:41:08 crc kubenswrapper[4779]: I0929 09:41:08.448884 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8gbfr"] Sep 29 09:41:17 crc kubenswrapper[4779]: I0929 09:41:17.970253 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-k8phl" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.495699 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-8gbfr" podUID="b61fd91c-c774-44fd-9d5e-114aa59a1b39" containerName="console" containerID="cri-o://7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8" gracePeriod=15 Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.832138 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8gbfr_b61fd91c-c774-44fd-9d5e-114aa59a1b39/console/0.log" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.832196 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.975784 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-config\") pod \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976179 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjvcq\" (UniqueName: \"kubernetes.io/projected/b61fd91c-c774-44fd-9d5e-114aa59a1b39-kube-api-access-wjvcq\") pod \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976208 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-oauth-config\") pod \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976233 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-trusted-ca-bundle\") pod \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976260 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-serving-cert\") pod \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976319 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-service-ca\") pod \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976403 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-oauth-serving-cert\") pod \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\" (UID: \"b61fd91c-c774-44fd-9d5e-114aa59a1b39\") " Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976575 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-config" (OuterVolumeSpecName: "console-config") pod "b61fd91c-c774-44fd-9d5e-114aa59a1b39" (UID: "b61fd91c-c774-44fd-9d5e-114aa59a1b39"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976689 4779 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.976986 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "b61fd91c-c774-44fd-9d5e-114aa59a1b39" (UID: "b61fd91c-c774-44fd-9d5e-114aa59a1b39"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.977072 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "b61fd91c-c774-44fd-9d5e-114aa59a1b39" (UID: "b61fd91c-c774-44fd-9d5e-114aa59a1b39"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.977142 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-service-ca" (OuterVolumeSpecName: "service-ca") pod "b61fd91c-c774-44fd-9d5e-114aa59a1b39" (UID: "b61fd91c-c774-44fd-9d5e-114aa59a1b39"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.982073 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b61fd91c-c774-44fd-9d5e-114aa59a1b39-kube-api-access-wjvcq" (OuterVolumeSpecName: "kube-api-access-wjvcq") pod "b61fd91c-c774-44fd-9d5e-114aa59a1b39" (UID: "b61fd91c-c774-44fd-9d5e-114aa59a1b39"). InnerVolumeSpecName "kube-api-access-wjvcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.987238 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "b61fd91c-c774-44fd-9d5e-114aa59a1b39" (UID: "b61fd91c-c774-44fd-9d5e-114aa59a1b39"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:41:33 crc kubenswrapper[4779]: I0929 09:41:33.987658 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "b61fd91c-c774-44fd-9d5e-114aa59a1b39" (UID: "b61fd91c-c774-44fd-9d5e-114aa59a1b39"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.073980 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd"] Sep 29 09:41:34 crc kubenswrapper[4779]: E0929 09:41:34.074387 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b61fd91c-c774-44fd-9d5e-114aa59a1b39" containerName="console" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.074409 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b61fd91c-c774-44fd-9d5e-114aa59a1b39" containerName="console" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.074613 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b61fd91c-c774-44fd-9d5e-114aa59a1b39" containerName="console" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.075831 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.077985 4779 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.078010 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjvcq\" (UniqueName: \"kubernetes.io/projected/b61fd91c-c774-44fd-9d5e-114aa59a1b39-kube-api-access-wjvcq\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.078022 4779 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.078030 4779 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.078038 4779 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b61fd91c-c774-44fd-9d5e-114aa59a1b39-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.078048 4779 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b61fd91c-c774-44fd-9d5e-114aa59a1b39-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.079760 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.084226 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd"] Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.180309 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " 
pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.180374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh9p9\" (UniqueName: \"kubernetes.io/projected/7a6d49df-184b-486e-a7c6-5eb0aecab19f-kube-api-access-lh9p9\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.180441 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.281415 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.281461 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh9p9\" (UniqueName: \"kubernetes.io/projected/7a6d49df-184b-486e-a7c6-5eb0aecab19f-kube-api-access-lh9p9\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.281506 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.282314 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.282323 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.296698 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh9p9\" (UniqueName: 
\"kubernetes.io/projected/7a6d49df-184b-486e-a7c6-5eb0aecab19f-kube-api-access-lh9p9\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.391610 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.562998 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8gbfr_b61fd91c-c774-44fd-9d5e-114aa59a1b39/console/0.log" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.563063 4779 generic.go:334] "Generic (PLEG): container finished" podID="b61fd91c-c774-44fd-9d5e-114aa59a1b39" containerID="7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8" exitCode=2 Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.563111 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8gbfr" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.563110 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8gbfr" event={"ID":"b61fd91c-c774-44fd-9d5e-114aa59a1b39","Type":"ContainerDied","Data":"7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8"} Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.563287 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8gbfr" event={"ID":"b61fd91c-c774-44fd-9d5e-114aa59a1b39","Type":"ContainerDied","Data":"5c61308255e5bd53a2f972577493acef5021829db0cf3ecaeed10bc46d7cb337"} Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.563310 4779 scope.go:117] "RemoveContainer" containerID="7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.591716 4779 scope.go:117] "RemoveContainer" containerID="7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8" Sep 29 09:41:34 crc kubenswrapper[4779]: E0929 09:41:34.592210 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8\": container with ID starting with 7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8 not found: ID does not exist" containerID="7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.592257 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8"} err="failed to get container status \"7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8\": rpc error: code = NotFound desc = could not find container \"7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8\": container with ID starting with 7439234fcab4979503e8f6767c413a80d89736e389849f65bbad9a1b2b0a59a8 not found: ID does not exist" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.593281 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8gbfr"] Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.598252 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-console/console-f9d7485db-8gbfr"] Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.720793 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b61fd91c-c774-44fd-9d5e-114aa59a1b39" path="/var/lib/kubelet/pods/b61fd91c-c774-44fd-9d5e-114aa59a1b39/volumes" Sep 29 09:41:34 crc kubenswrapper[4779]: I0929 09:41:34.812280 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd"] Sep 29 09:41:35 crc kubenswrapper[4779]: I0929 09:41:35.575283 4779 generic.go:334] "Generic (PLEG): container finished" podID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerID="8cc5f1a2ed2d83752e0e8c843394a13ddad5e462fb3a3407a2254924077328b7" exitCode=0 Sep 29 09:41:35 crc kubenswrapper[4779]: I0929 09:41:35.575353 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" event={"ID":"7a6d49df-184b-486e-a7c6-5eb0aecab19f","Type":"ContainerDied","Data":"8cc5f1a2ed2d83752e0e8c843394a13ddad5e462fb3a3407a2254924077328b7"} Sep 29 09:41:35 crc kubenswrapper[4779]: I0929 09:41:35.575606 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" event={"ID":"7a6d49df-184b-486e-a7c6-5eb0aecab19f","Type":"ContainerStarted","Data":"c8d89872b81fa8d8b561e64f54a3b127dddc57b08d3e0f97203a3716330dc558"} Sep 29 09:41:37 crc kubenswrapper[4779]: I0929 09:41:37.588481 4779 generic.go:334] "Generic (PLEG): container finished" podID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerID="0d84718c5ca0ca19432f4b4bb22753b38c2c6b88af052d2b008e6ff2ecedc882" exitCode=0 Sep 29 09:41:37 crc kubenswrapper[4779]: I0929 09:41:37.588771 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" event={"ID":"7a6d49df-184b-486e-a7c6-5eb0aecab19f","Type":"ContainerDied","Data":"0d84718c5ca0ca19432f4b4bb22753b38c2c6b88af052d2b008e6ff2ecedc882"} Sep 29 09:41:38 crc kubenswrapper[4779]: I0929 09:41:38.596745 4779 generic.go:334] "Generic (PLEG): container finished" podID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerID="497cd4c50e6e4bfcabfc0e21994211438b7ae0f408e9aced44e2cb4f6795a1cc" exitCode=0 Sep 29 09:41:38 crc kubenswrapper[4779]: I0929 09:41:38.596848 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" event={"ID":"7a6d49df-184b-486e-a7c6-5eb0aecab19f","Type":"ContainerDied","Data":"497cd4c50e6e4bfcabfc0e21994211438b7ae0f408e9aced44e2cb4f6795a1cc"} Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.857600 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.869680 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-bundle\") pod \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.869839 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-util\") pod \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.869887 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh9p9\" (UniqueName: \"kubernetes.io/projected/7a6d49df-184b-486e-a7c6-5eb0aecab19f-kube-api-access-lh9p9\") pod \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\" (UID: \"7a6d49df-184b-486e-a7c6-5eb0aecab19f\") " Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.870532 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-bundle" (OuterVolumeSpecName: "bundle") pod "7a6d49df-184b-486e-a7c6-5eb0aecab19f" (UID: "7a6d49df-184b-486e-a7c6-5eb0aecab19f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.871632 4779 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.875845 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a6d49df-184b-486e-a7c6-5eb0aecab19f-kube-api-access-lh9p9" (OuterVolumeSpecName: "kube-api-access-lh9p9") pod "7a6d49df-184b-486e-a7c6-5eb0aecab19f" (UID: "7a6d49df-184b-486e-a7c6-5eb0aecab19f"). InnerVolumeSpecName "kube-api-access-lh9p9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.904878 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-util" (OuterVolumeSpecName: "util") pod "7a6d49df-184b-486e-a7c6-5eb0aecab19f" (UID: "7a6d49df-184b-486e-a7c6-5eb0aecab19f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.973803 4779 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a6d49df-184b-486e-a7c6-5eb0aecab19f-util\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:39 crc kubenswrapper[4779]: I0929 09:41:39.973848 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh9p9\" (UniqueName: \"kubernetes.io/projected/7a6d49df-184b-486e-a7c6-5eb0aecab19f-kube-api-access-lh9p9\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:40 crc kubenswrapper[4779]: I0929 09:41:40.613759 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" event={"ID":"7a6d49df-184b-486e-a7c6-5eb0aecab19f","Type":"ContainerDied","Data":"c8d89872b81fa8d8b561e64f54a3b127dddc57b08d3e0f97203a3716330dc558"} Sep 29 09:41:40 crc kubenswrapper[4779]: I0929 09:41:40.613804 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8d89872b81fa8d8b561e64f54a3b127dddc57b08d3e0f97203a3716330dc558" Sep 29 09:41:40 crc kubenswrapper[4779]: I0929 09:41:40.613922 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd" Sep 29 09:41:49 crc kubenswrapper[4779]: I0929 09:41:49.522277 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx"] Sep 29 09:41:49 crc kubenswrapper[4779]: I0929 09:41:49.523090 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" podUID="ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" containerName="route-controller-manager" containerID="cri-o://591572edb7c09b2f9656067b550b5bf635c3359f5e27806d30fa5b66e11027f1" gracePeriod=30 Sep 29 09:41:49 crc kubenswrapper[4779]: I0929 09:41:49.530683 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-j6lzv"] Sep 29 09:41:49 crc kubenswrapper[4779]: I0929 09:41:49.530945 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" podUID="66b2a431-ff77-457e-99aa-d4dd40bc4640" containerName="controller-manager" containerID="cri-o://74e5b6fca00eef71d1d5dd2c0d79596d5199414727042fd70d6f950c7b8b694e" gracePeriod=30 Sep 29 09:41:49 crc kubenswrapper[4779]: I0929 09:41:49.681533 4779 generic.go:334] "Generic (PLEG): container finished" podID="66b2a431-ff77-457e-99aa-d4dd40bc4640" containerID="74e5b6fca00eef71d1d5dd2c0d79596d5199414727042fd70d6f950c7b8b694e" exitCode=0 Sep 29 09:41:49 crc kubenswrapper[4779]: I0929 09:41:49.681597 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" event={"ID":"66b2a431-ff77-457e-99aa-d4dd40bc4640","Type":"ContainerDied","Data":"74e5b6fca00eef71d1d5dd2c0d79596d5199414727042fd70d6f950c7b8b694e"} Sep 29 09:41:49 crc kubenswrapper[4779]: I0929 09:41:49.683871 4779 generic.go:334] "Generic (PLEG): container finished" podID="ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" containerID="591572edb7c09b2f9656067b550b5bf635c3359f5e27806d30fa5b66e11027f1" exitCode=0 Sep 29 09:41:49 crc kubenswrapper[4779]: I0929 09:41:49.683946 4779 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" event={"ID":"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c","Type":"ContainerDied","Data":"591572edb7c09b2f9656067b550b5bf635c3359f5e27806d30fa5b66e11027f1"} Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.006063 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.007745 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112240 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-config\") pod \"66b2a431-ff77-457e-99aa-d4dd40bc4640\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112315 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-client-ca\") pod \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112345 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-proxy-ca-bundles\") pod \"66b2a431-ff77-457e-99aa-d4dd40bc4640\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112370 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-client-ca\") pod \"66b2a431-ff77-457e-99aa-d4dd40bc4640\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112393 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-serving-cert\") pod \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112415 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66b2a431-ff77-457e-99aa-d4dd40bc4640-serving-cert\") pod \"66b2a431-ff77-457e-99aa-d4dd40bc4640\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112460 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngnl4\" (UniqueName: \"kubernetes.io/projected/66b2a431-ff77-457e-99aa-d4dd40bc4640-kube-api-access-ngnl4\") pod \"66b2a431-ff77-457e-99aa-d4dd40bc4640\" (UID: \"66b2a431-ff77-457e-99aa-d4dd40bc4640\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112502 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-config\") pod \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.112554 4779 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp25w\" (UniqueName: \"kubernetes.io/projected/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-kube-api-access-hp25w\") pod \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\" (UID: \"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c\") " Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.113087 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-client-ca" (OuterVolumeSpecName: "client-ca") pod "66b2a431-ff77-457e-99aa-d4dd40bc4640" (UID: "66b2a431-ff77-457e-99aa-d4dd40bc4640"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.113139 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "66b2a431-ff77-457e-99aa-d4dd40bc4640" (UID: "66b2a431-ff77-457e-99aa-d4dd40bc4640"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.113362 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-client-ca" (OuterVolumeSpecName: "client-ca") pod "ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" (UID: "ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.113514 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-config" (OuterVolumeSpecName: "config") pod "66b2a431-ff77-457e-99aa-d4dd40bc4640" (UID: "66b2a431-ff77-457e-99aa-d4dd40bc4640"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.114242 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-config" (OuterVolumeSpecName: "config") pod "ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" (UID: "ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.118483 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b2a431-ff77-457e-99aa-d4dd40bc4640-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "66b2a431-ff77-457e-99aa-d4dd40bc4640" (UID: "66b2a431-ff77-457e-99aa-d4dd40bc4640"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.119054 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66b2a431-ff77-457e-99aa-d4dd40bc4640-kube-api-access-ngnl4" (OuterVolumeSpecName: "kube-api-access-ngnl4") pod "66b2a431-ff77-457e-99aa-d4dd40bc4640" (UID: "66b2a431-ff77-457e-99aa-d4dd40bc4640"). InnerVolumeSpecName "kube-api-access-ngnl4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.119501 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" (UID: "ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.120204 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-kube-api-access-hp25w" (OuterVolumeSpecName: "kube-api-access-hp25w") pod "ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" (UID: "ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c"). InnerVolumeSpecName "kube-api-access-hp25w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199412 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5"] Sep 29 09:41:50 crc kubenswrapper[4779]: E0929 09:41:50.199677 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerName="extract" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199692 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerName="extract" Sep 29 09:41:50 crc kubenswrapper[4779]: E0929 09:41:50.199704 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerName="util" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199712 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerName="util" Sep 29 09:41:50 crc kubenswrapper[4779]: E0929 09:41:50.199735 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b2a431-ff77-457e-99aa-d4dd40bc4640" containerName="controller-manager" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199743 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b2a431-ff77-457e-99aa-d4dd40bc4640" containerName="controller-manager" Sep 29 09:41:50 crc kubenswrapper[4779]: E0929 09:41:50.199753 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" containerName="route-controller-manager" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199761 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" containerName="route-controller-manager" Sep 29 09:41:50 crc kubenswrapper[4779]: E0929 09:41:50.199773 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerName="pull" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199780 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerName="pull" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199923 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a6d49df-184b-486e-a7c6-5eb0aecab19f" containerName="extract" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199942 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" containerName="route-controller-manager" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.199954 
4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b2a431-ff77-457e-99aa-d4dd40bc4640" containerName="controller-manager" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.200461 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.213860 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngnl4\" (UniqueName: \"kubernetes.io/projected/66b2a431-ff77-457e-99aa-d4dd40bc4640-kube-api-access-ngnl4\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.213899 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.213929 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp25w\" (UniqueName: \"kubernetes.io/projected/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-kube-api-access-hp25w\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.213940 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.213952 4779 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.213965 4779 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.213978 4779 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66b2a431-ff77-457e-99aa-d4dd40bc4640-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.213989 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.214000 4779 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66b2a431-ff77-457e-99aa-d4dd40bc4640-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.218945 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5"] Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.289095 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-554749994f-78nvx"] Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.289875 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.310876 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-554749994f-78nvx"] Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.314969 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jpc4\" (UniqueName: \"kubernetes.io/projected/c80536ab-1c0d-4839-9ad7-4baaf91d4955-kube-api-access-7jpc4\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.315027 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-config\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.315069 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-client-ca\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.315121 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-proxy-ca-bundles\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.315175 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c80536ab-1c0d-4839-9ad7-4baaf91d4955-serving-cert\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416410 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jpc4\" (UniqueName: \"kubernetes.io/projected/c80536ab-1c0d-4839-9ad7-4baaf91d4955-kube-api-access-7jpc4\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416464 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-config\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416493 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-client-ca\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416513 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-proxy-ca-bundles\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416532 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c80536ab-1c0d-4839-9ad7-4baaf91d4955-serving-cert\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416550 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk54n\" (UniqueName: \"kubernetes.io/projected/950810c1-70c1-4ee4-8bb9-7c727864b775-kube-api-access-fk54n\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416569 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/950810c1-70c1-4ee4-8bb9-7c727864b775-config\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416589 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/950810c1-70c1-4ee4-8bb9-7c727864b775-serving-cert\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.416624 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/950810c1-70c1-4ee4-8bb9-7c727864b775-client-ca\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.417549 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-client-ca\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.417736 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-proxy-ca-bundles\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.419769 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c80536ab-1c0d-4839-9ad7-4baaf91d4955-config\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.421897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c80536ab-1c0d-4839-9ad7-4baaf91d4955-serving-cert\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.433947 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jpc4\" (UniqueName: \"kubernetes.io/projected/c80536ab-1c0d-4839-9ad7-4baaf91d4955-kube-api-access-7jpc4\") pod \"controller-manager-7bbfcbc847-hd9g5\" (UID: \"c80536ab-1c0d-4839-9ad7-4baaf91d4955\") " pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.515033 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.518061 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk54n\" (UniqueName: \"kubernetes.io/projected/950810c1-70c1-4ee4-8bb9-7c727864b775-kube-api-access-fk54n\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.518111 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/950810c1-70c1-4ee4-8bb9-7c727864b775-config\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.518143 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/950810c1-70c1-4ee4-8bb9-7c727864b775-serving-cert\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.518178 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/950810c1-70c1-4ee4-8bb9-7c727864b775-client-ca\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.519134 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/950810c1-70c1-4ee4-8bb9-7c727864b775-client-ca\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.519311 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/950810c1-70c1-4ee4-8bb9-7c727864b775-config\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.523456 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/950810c1-70c1-4ee4-8bb9-7c727864b775-serving-cert\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.543229 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk54n\" (UniqueName: \"kubernetes.io/projected/950810c1-70c1-4ee4-8bb9-7c727864b775-kube-api-access-fk54n\") pod \"route-controller-manager-554749994f-78nvx\" (UID: \"950810c1-70c1-4ee4-8bb9-7c727864b775\") " pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.604470 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.694296 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" event={"ID":"ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c","Type":"ContainerDied","Data":"8090d23a1def5249eacc631662414b03461acb5d1c74ffc75786dc2380099e9a"} Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.694611 4779 scope.go:117] "RemoveContainer" containerID="591572edb7c09b2f9656067b550b5bf635c3359f5e27806d30fa5b66e11027f1" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.694728 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.712631 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" event={"ID":"66b2a431-ff77-457e-99aa-d4dd40bc4640","Type":"ContainerDied","Data":"9b6f39a6656ebede26a83c06c747133258dfc361cd89b39ba51c1713ccb0109c"} Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.712709 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-j6lzv" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.750682 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx"] Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.751008 4779 scope.go:117] "RemoveContainer" containerID="74e5b6fca00eef71d1d5dd2c0d79596d5199414727042fd70d6f950c7b8b694e" Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.757246 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4m9cx"] Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.775846 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-j6lzv"] Sep 29 09:41:50 crc kubenswrapper[4779]: I0929 09:41:50.781727 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-j6lzv"] Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.062432 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5"] Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.088538 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-554749994f-78nvx"] Sep 29 09:41:51 crc kubenswrapper[4779]: W0929 09:41:51.098462 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod950810c1_70c1_4ee4_8bb9_7c727864b775.slice/crio-b8dc7ead8522723877903bf29b4087d046afe2acd27dfb300a7ed269b2a4db87 WatchSource:0}: Error finding container b8dc7ead8522723877903bf29b4087d046afe2acd27dfb300a7ed269b2a4db87: Status 404 returned error can't find the container with id b8dc7ead8522723877903bf29b4087d046afe2acd27dfb300a7ed269b2a4db87 Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.721773 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" event={"ID":"950810c1-70c1-4ee4-8bb9-7c727864b775","Type":"ContainerStarted","Data":"3974126110b79616dff87fa47c72478e76acbd78e002b920215d121ac0611044"} Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.721820 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" event={"ID":"950810c1-70c1-4ee4-8bb9-7c727864b775","Type":"ContainerStarted","Data":"b8dc7ead8522723877903bf29b4087d046afe2acd27dfb300a7ed269b2a4db87"} Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.722002 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.723053 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" event={"ID":"c80536ab-1c0d-4839-9ad7-4baaf91d4955","Type":"ContainerStarted","Data":"c9cdce3f3d051fd54561ecc1ca719c47df8671d3b686a2adcfbdf6b50c0bbe21"} Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.723359 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" 
event={"ID":"c80536ab-1c0d-4839-9ad7-4baaf91d4955","Type":"ContainerStarted","Data":"2e474b3b235a8d2a02c7eb751757687846d5cdf240d6602d34edd2fdd896c8d3"} Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.724076 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.727739 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.731371 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.741990 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-554749994f-78nvx" podStartSLOduration=1.741978585 podStartE2EDuration="1.741978585s" podCreationTimestamp="2025-09-29 09:41:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:41:51.740928996 +0000 UTC m=+743.722252900" watchObservedRunningTime="2025-09-29 09:41:51.741978585 +0000 UTC m=+743.723302489" Sep 29 09:41:51 crc kubenswrapper[4779]: I0929 09:41:51.762559 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7bbfcbc847-hd9g5" podStartSLOduration=1.762544279 podStartE2EDuration="1.762544279s" podCreationTimestamp="2025-09-29 09:41:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:41:51.75826785 +0000 UTC m=+743.739591754" watchObservedRunningTime="2025-09-29 09:41:51.762544279 +0000 UTC m=+743.743868183" Sep 29 09:41:52 crc kubenswrapper[4779]: I0929 09:41:52.726818 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66b2a431-ff77-457e-99aa-d4dd40bc4640" path="/var/lib/kubelet/pods/66b2a431-ff77-457e-99aa-d4dd40bc4640/volumes" Sep 29 09:41:52 crc kubenswrapper[4779]: I0929 09:41:52.727558 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c" path="/var/lib/kubelet/pods/ac283e2f-8cf1-4fae-ab94-8b4d9b57ac0c/volumes" Sep 29 09:41:53 crc kubenswrapper[4779]: I0929 09:41:53.989142 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9"] Sep 29 09:41:53 crc kubenswrapper[4779]: I0929 09:41:53.990255 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:53 crc kubenswrapper[4779]: I0929 09:41:53.993301 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 29 09:41:53 crc kubenswrapper[4779]: I0929 09:41:53.993503 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 29 09:41:53 crc kubenswrapper[4779]: I0929 09:41:53.994216 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 29 09:41:53 crc kubenswrapper[4779]: I0929 09:41:53.994232 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 29 09:41:53 crc kubenswrapper[4779]: I0929 09:41:53.994786 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-6mxxv" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.000707 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9"] Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.067767 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj8g2\" (UniqueName: \"kubernetes.io/projected/3386b8db-4ec5-4453-9150-c8697e8b67d4-kube-api-access-bj8g2\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.067868 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3386b8db-4ec5-4453-9150-c8697e8b67d4-webhook-cert\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.067982 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3386b8db-4ec5-4453-9150-c8697e8b67d4-apiservice-cert\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.168840 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj8g2\" (UniqueName: \"kubernetes.io/projected/3386b8db-4ec5-4453-9150-c8697e8b67d4-kube-api-access-bj8g2\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.168951 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3386b8db-4ec5-4453-9150-c8697e8b67d4-webhook-cert\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.168994 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3386b8db-4ec5-4453-9150-c8697e8b67d4-apiservice-cert\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.175046 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3386b8db-4ec5-4453-9150-c8697e8b67d4-webhook-cert\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.185842 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj8g2\" (UniqueName: \"kubernetes.io/projected/3386b8db-4ec5-4453-9150-c8697e8b67d4-kube-api-access-bj8g2\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.192265 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3386b8db-4ec5-4453-9150-c8697e8b67d4-apiservice-cert\") pod \"metallb-operator-controller-manager-5666db5b47-2v9x9\" (UID: \"3386b8db-4ec5-4453-9150-c8697e8b67d4\") " pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.234767 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x"] Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.235654 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.238411 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.238851 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.239793 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-rrv7x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.252494 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x"] Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.345851 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.370637 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8a35c267-ee63-490b-bf65-f1afb52840cb-apiservice-cert\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.371030 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9hmh\" (UniqueName: \"kubernetes.io/projected/8a35c267-ee63-490b-bf65-f1afb52840cb-kube-api-access-s9hmh\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.371058 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8a35c267-ee63-490b-bf65-f1afb52840cb-webhook-cert\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.472737 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8a35c267-ee63-490b-bf65-f1afb52840cb-apiservice-cert\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.472797 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9hmh\" (UniqueName: \"kubernetes.io/projected/8a35c267-ee63-490b-bf65-f1afb52840cb-kube-api-access-s9hmh\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.472820 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8a35c267-ee63-490b-bf65-f1afb52840cb-webhook-cert\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.476868 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8a35c267-ee63-490b-bf65-f1afb52840cb-webhook-cert\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.476896 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8a35c267-ee63-490b-bf65-f1afb52840cb-apiservice-cert\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " 
pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.498156 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9hmh\" (UniqueName: \"kubernetes.io/projected/8a35c267-ee63-490b-bf65-f1afb52840cb-kube-api-access-s9hmh\") pod \"metallb-operator-webhook-server-86cbc99f9-b5b6x\" (UID: \"8a35c267-ee63-490b-bf65-f1afb52840cb\") " pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.549245 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.775275 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x"] Sep 29 09:41:54 crc kubenswrapper[4779]: I0929 09:41:54.782350 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9"] Sep 29 09:41:54 crc kubenswrapper[4779]: W0929 09:41:54.785273 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a35c267_ee63_490b_bf65_f1afb52840cb.slice/crio-74c176474062c5537674ef62c209a6115790e1651636dcdb245ca5356897219e WatchSource:0}: Error finding container 74c176474062c5537674ef62c209a6115790e1651636dcdb245ca5356897219e: Status 404 returned error can't find the container with id 74c176474062c5537674ef62c209a6115790e1651636dcdb245ca5356897219e Sep 29 09:41:54 crc kubenswrapper[4779]: W0929 09:41:54.788133 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3386b8db_4ec5_4453_9150_c8697e8b67d4.slice/crio-ed898a667cdf7fd4feeda67a1aac11294db864eacf91bc3afb6bb222e6410e67 WatchSource:0}: Error finding container ed898a667cdf7fd4feeda67a1aac11294db864eacf91bc3afb6bb222e6410e67: Status 404 returned error can't find the container with id ed898a667cdf7fd4feeda67a1aac11294db864eacf91bc3afb6bb222e6410e67 Sep 29 09:41:55 crc kubenswrapper[4779]: I0929 09:41:55.748164 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" event={"ID":"8a35c267-ee63-490b-bf65-f1afb52840cb","Type":"ContainerStarted","Data":"74c176474062c5537674ef62c209a6115790e1651636dcdb245ca5356897219e"} Sep 29 09:41:55 crc kubenswrapper[4779]: I0929 09:41:55.749398 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" event={"ID":"3386b8db-4ec5-4453-9150-c8697e8b67d4","Type":"ContainerStarted","Data":"ed898a667cdf7fd4feeda67a1aac11294db864eacf91bc3afb6bb222e6410e67"} Sep 29 09:41:56 crc kubenswrapper[4779]: I0929 09:41:56.366507 4779 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 09:41:58 crc kubenswrapper[4779]: I0929 09:41:58.768158 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" event={"ID":"3386b8db-4ec5-4453-9150-c8697e8b67d4","Type":"ContainerStarted","Data":"8ab6cd41883ced9401de4c4a39cadf5464880b2586aecd627d78afd66a00ee44"} Sep 29 09:41:58 crc kubenswrapper[4779]: I0929 09:41:58.769418 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:41:58 crc kubenswrapper[4779]: I0929 09:41:58.797449 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" podStartSLOduration=2.463537278 podStartE2EDuration="5.797430615s" podCreationTimestamp="2025-09-29 09:41:53 +0000 UTC" firstStartedPulling="2025-09-29 09:41:54.789612322 +0000 UTC m=+746.770936226" lastFinishedPulling="2025-09-29 09:41:58.123505659 +0000 UTC m=+750.104829563" observedRunningTime="2025-09-29 09:41:58.784292189 +0000 UTC m=+750.765616093" watchObservedRunningTime="2025-09-29 09:41:58.797430615 +0000 UTC m=+750.778754519" Sep 29 09:42:00 crc kubenswrapper[4779]: I0929 09:42:00.780790 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" event={"ID":"8a35c267-ee63-490b-bf65-f1afb52840cb","Type":"ContainerStarted","Data":"f3ccdcb4f9911b65130f09e23fa5d591f24941539f55ce7319a72fc252490585"} Sep 29 09:42:00 crc kubenswrapper[4779]: I0929 09:42:00.781103 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:42:00 crc kubenswrapper[4779]: I0929 09:42:00.808328 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" podStartSLOduration=1.873283641 podStartE2EDuration="6.808298311s" podCreationTimestamp="2025-09-29 09:41:54 +0000 UTC" firstStartedPulling="2025-09-29 09:41:54.789629943 +0000 UTC m=+746.770953847" lastFinishedPulling="2025-09-29 09:41:59.724644613 +0000 UTC m=+751.705968517" observedRunningTime="2025-09-29 09:42:00.806244893 +0000 UTC m=+752.787568837" watchObservedRunningTime="2025-09-29 09:42:00.808298311 +0000 UTC m=+752.789622255" Sep 29 09:42:14 crc kubenswrapper[4779]: I0929 09:42:14.553829 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-86cbc99f9-b5b6x" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.085122 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lwhmf"] Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.086843 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.118579 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwhmf"] Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.201545 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm7g4\" (UniqueName: \"kubernetes.io/projected/86259ef6-d853-4d9a-98dd-98933c8b1b22-kube-api-access-zm7g4\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.201821 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-utilities\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.201959 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-catalog-content\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.303137 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm7g4\" (UniqueName: \"kubernetes.io/projected/86259ef6-d853-4d9a-98dd-98933c8b1b22-kube-api-access-zm7g4\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.303443 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-utilities\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.303557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-catalog-content\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.304055 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-utilities\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.304115 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-catalog-content\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.330771 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zm7g4\" (UniqueName: \"kubernetes.io/projected/86259ef6-d853-4d9a-98dd-98933c8b1b22-kube-api-access-zm7g4\") pod \"redhat-marketplace-lwhmf\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.405387 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.826598 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwhmf"] Sep 29 09:42:19 crc kubenswrapper[4779]: W0929 09:42:19.834574 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86259ef6_d853_4d9a_98dd_98933c8b1b22.slice/crio-293493b1885fba79c1ed5af0285c46e059486d96d3652ac50acde506076466c3 WatchSource:0}: Error finding container 293493b1885fba79c1ed5af0285c46e059486d96d3652ac50acde506076466c3: Status 404 returned error can't find the container with id 293493b1885fba79c1ed5af0285c46e059486d96d3652ac50acde506076466c3 Sep 29 09:42:19 crc kubenswrapper[4779]: I0929 09:42:19.889372 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwhmf" event={"ID":"86259ef6-d853-4d9a-98dd-98933c8b1b22","Type":"ContainerStarted","Data":"293493b1885fba79c1ed5af0285c46e059486d96d3652ac50acde506076466c3"} Sep 29 09:42:20 crc kubenswrapper[4779]: I0929 09:42:20.897522 4779 generic.go:334] "Generic (PLEG): container finished" podID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerID="fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a" exitCode=0 Sep 29 09:42:20 crc kubenswrapper[4779]: I0929 09:42:20.897582 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwhmf" event={"ID":"86259ef6-d853-4d9a-98dd-98933c8b1b22","Type":"ContainerDied","Data":"fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a"} Sep 29 09:42:21 crc kubenswrapper[4779]: I0929 09:42:21.906890 4779 generic.go:334] "Generic (PLEG): container finished" podID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerID="ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35" exitCode=0 Sep 29 09:42:21 crc kubenswrapper[4779]: I0929 09:42:21.907009 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwhmf" event={"ID":"86259ef6-d853-4d9a-98dd-98933c8b1b22","Type":"ContainerDied","Data":"ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35"} Sep 29 09:42:22 crc kubenswrapper[4779]: I0929 09:42:22.916534 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwhmf" event={"ID":"86259ef6-d853-4d9a-98dd-98933c8b1b22","Type":"ContainerStarted","Data":"8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77"} Sep 29 09:42:22 crc kubenswrapper[4779]: I0929 09:42:22.937164 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lwhmf" podStartSLOduration=2.542427462 podStartE2EDuration="3.937145791s" podCreationTimestamp="2025-09-29 09:42:19 +0000 UTC" firstStartedPulling="2025-09-29 09:42:20.899366926 +0000 UTC m=+772.880690850" lastFinishedPulling="2025-09-29 09:42:22.294085275 +0000 UTC m=+774.275409179" observedRunningTime="2025-09-29 09:42:22.934017524 +0000 UTC m=+774.915341438" 
watchObservedRunningTime="2025-09-29 09:42:22.937145791 +0000 UTC m=+774.918469705" Sep 29 09:42:29 crc kubenswrapper[4779]: I0929 09:42:29.408226 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:29 crc kubenswrapper[4779]: I0929 09:42:29.408821 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:29 crc kubenswrapper[4779]: I0929 09:42:29.450562 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:30 crc kubenswrapper[4779]: I0929 09:42:30.011759 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:31 crc kubenswrapper[4779]: I0929 09:42:31.856145 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwhmf"] Sep 29 09:42:31 crc kubenswrapper[4779]: I0929 09:42:31.973235 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lwhmf" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerName="registry-server" containerID="cri-o://8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77" gracePeriod=2 Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.415938 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.568081 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-utilities\") pod \"86259ef6-d853-4d9a-98dd-98933c8b1b22\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.568146 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-catalog-content\") pod \"86259ef6-d853-4d9a-98dd-98933c8b1b22\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.568236 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm7g4\" (UniqueName: \"kubernetes.io/projected/86259ef6-d853-4d9a-98dd-98933c8b1b22-kube-api-access-zm7g4\") pod \"86259ef6-d853-4d9a-98dd-98933c8b1b22\" (UID: \"86259ef6-d853-4d9a-98dd-98933c8b1b22\") " Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.569073 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-utilities" (OuterVolumeSpecName: "utilities") pod "86259ef6-d853-4d9a-98dd-98933c8b1b22" (UID: "86259ef6-d853-4d9a-98dd-98933c8b1b22"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.576403 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86259ef6-d853-4d9a-98dd-98933c8b1b22-kube-api-access-zm7g4" (OuterVolumeSpecName: "kube-api-access-zm7g4") pod "86259ef6-d853-4d9a-98dd-98933c8b1b22" (UID: "86259ef6-d853-4d9a-98dd-98933c8b1b22"). InnerVolumeSpecName "kube-api-access-zm7g4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.580492 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86259ef6-d853-4d9a-98dd-98933c8b1b22" (UID: "86259ef6-d853-4d9a-98dd-98933c8b1b22"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.669650 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.669691 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86259ef6-d853-4d9a-98dd-98933c8b1b22-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.669705 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm7g4\" (UniqueName: \"kubernetes.io/projected/86259ef6-d853-4d9a-98dd-98933c8b1b22-kube-api-access-zm7g4\") on node \"crc\" DevicePath \"\"" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.980525 4779 generic.go:334] "Generic (PLEG): container finished" podID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerID="8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77" exitCode=0 Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.980596 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwhmf" event={"ID":"86259ef6-d853-4d9a-98dd-98933c8b1b22","Type":"ContainerDied","Data":"8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77"} Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.980652 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lwhmf" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.980849 4779 scope.go:117] "RemoveContainer" containerID="8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77" Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.980834 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwhmf" event={"ID":"86259ef6-d853-4d9a-98dd-98933c8b1b22","Type":"ContainerDied","Data":"293493b1885fba79c1ed5af0285c46e059486d96d3652ac50acde506076466c3"} Sep 29 09:42:32 crc kubenswrapper[4779]: I0929 09:42:32.996315 4779 scope.go:117] "RemoveContainer" containerID="ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35" Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.003490 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwhmf"] Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.007475 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwhmf"] Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.011080 4779 scope.go:117] "RemoveContainer" containerID="fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a" Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.031621 4779 scope.go:117] "RemoveContainer" containerID="8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77" Sep 29 09:42:33 crc kubenswrapper[4779]: E0929 09:42:33.032119 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77\": container with ID starting with 8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77 not found: ID does not exist" containerID="8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77" Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.032156 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77"} err="failed to get container status \"8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77\": rpc error: code = NotFound desc = could not find container \"8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77\": container with ID starting with 8cf524b9106a77add3db3860d1e4b7c37765636ba0d73362e0d78373c2adba77 not found: ID does not exist" Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.032188 4779 scope.go:117] "RemoveContainer" containerID="ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35" Sep 29 09:42:33 crc kubenswrapper[4779]: E0929 09:42:33.032472 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35\": container with ID starting with ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35 not found: ID does not exist" containerID="ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35" Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.032501 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35"} err="failed to get container status \"ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35\": rpc error: code = NotFound desc = could not find 
container \"ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35\": container with ID starting with ccbc882dd0ef16857a6e6722b00ec27ddee7be36c99e0a1390bc19bd97f31d35 not found: ID does not exist" Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.032518 4779 scope.go:117] "RemoveContainer" containerID="fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a" Sep 29 09:42:33 crc kubenswrapper[4779]: E0929 09:42:33.032726 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a\": container with ID starting with fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a not found: ID does not exist" containerID="fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a" Sep 29 09:42:33 crc kubenswrapper[4779]: I0929 09:42:33.032755 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a"} err="failed to get container status \"fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a\": rpc error: code = NotFound desc = could not find container \"fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a\": container with ID starting with fecc9fa17d8c61880fa9de2d7fad53a70fc82ef81b49b464f54c9f6a1e893a0a not found: ID does not exist" Sep 29 09:42:34 crc kubenswrapper[4779]: I0929 09:42:34.350778 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5666db5b47-2v9x9" Sep 29 09:42:34 crc kubenswrapper[4779]: I0929 09:42:34.722985 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" path="/var/lib/kubelet/pods/86259ef6-d853-4d9a-98dd-98933c8b1b22/volumes" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.163652 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm"] Sep 29 09:42:35 crc kubenswrapper[4779]: E0929 09:42:35.164074 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerName="extract-content" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.164102 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerName="extract-content" Sep 29 09:42:35 crc kubenswrapper[4779]: E0929 09:42:35.164119 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerName="registry-server" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.164131 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerName="registry-server" Sep 29 09:42:35 crc kubenswrapper[4779]: E0929 09:42:35.164162 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerName="extract-utilities" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.164175 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerName="extract-utilities" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.164364 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="86259ef6-d853-4d9a-98dd-98933c8b1b22" containerName="registry-server" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.165038 4779 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.166763 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.167130 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-bnqfd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.169474 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-hs7fd"] Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.172494 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.181223 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.182492 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.191149 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm"] Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.244975 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-n857k"] Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.246050 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.253069 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.253233 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.253346 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.253514 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-pc9x2" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.268497 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-l6gxt"] Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.269787 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.273601 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.286586 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-l6gxt"] Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309129 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69bn8\" (UniqueName: \"kubernetes.io/projected/84543eb2-59d1-4d2c-986c-5836e23a4a68-kube-api-access-69bn8\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309179 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-metrics\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309201 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-conf\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309220 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/148080b1-7775-4b6c-ad08-5de4695051bb-cert\") pod \"frr-k8s-webhook-server-5478bdb765-j47xm\" (UID: \"148080b1-7775-4b6c-ad08-5de4695051bb\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309242 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84543eb2-59d1-4d2c-986c-5836e23a4a68-metrics-certs\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309311 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zddxf\" (UniqueName: \"kubernetes.io/projected/148080b1-7775-4b6c-ad08-5de4695051bb-kube-api-access-zddxf\") pod \"frr-k8s-webhook-server-5478bdb765-j47xm\" (UID: \"148080b1-7775-4b6c-ad08-5de4695051bb\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309476 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-sockets\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309516 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-startup\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 
09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.309540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-reloader\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411119 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-sockets\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411398 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-startup\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411416 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-reloader\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411435 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-metrics-certs\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411470 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69bn8\" (UniqueName: \"kubernetes.io/projected/84543eb2-59d1-4d2c-986c-5836e23a4a68-kube-api-access-69bn8\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411494 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-metrics\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411512 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-conf\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411529 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/148080b1-7775-4b6c-ad08-5de4695051bb-cert\") pod \"frr-k8s-webhook-server-5478bdb765-j47xm\" (UID: \"148080b1-7775-4b6c-ad08-5de4695051bb\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411546 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: 
\"kubernetes.io/configmap/b474caaa-981c-427a-890c-aab91f461a90-metallb-excludel2\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411561 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rjmn\" (UniqueName: \"kubernetes.io/projected/c10a1808-8263-4c3f-8a8c-dc22de262eee-kube-api-access-4rjmn\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411582 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84543eb2-59d1-4d2c-986c-5836e23a4a68-metrics-certs\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411599 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zddxf\" (UniqueName: \"kubernetes.io/projected/148080b1-7775-4b6c-ad08-5de4695051bb-kube-api-access-zddxf\") pod \"frr-k8s-webhook-server-5478bdb765-j47xm\" (UID: \"148080b1-7775-4b6c-ad08-5de4695051bb\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411605 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-sockets\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411618 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdshz\" (UniqueName: \"kubernetes.io/projected/b474caaa-981c-427a-890c-aab91f461a90-kube-api-access-hdshz\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411691 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411777 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c10a1808-8263-4c3f-8a8c-dc22de262eee-metrics-certs\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411794 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c10a1808-8263-4c3f-8a8c-dc22de262eee-cert\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.411861 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-reloader\") 
pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.412125 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-metrics\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.412297 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-conf\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.412327 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/84543eb2-59d1-4d2c-986c-5836e23a4a68-frr-startup\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.416316 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84543eb2-59d1-4d2c-986c-5836e23a4a68-metrics-certs\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.416559 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/148080b1-7775-4b6c-ad08-5de4695051bb-cert\") pod \"frr-k8s-webhook-server-5478bdb765-j47xm\" (UID: \"148080b1-7775-4b6c-ad08-5de4695051bb\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.430587 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69bn8\" (UniqueName: \"kubernetes.io/projected/84543eb2-59d1-4d2c-986c-5836e23a4a68-kube-api-access-69bn8\") pod \"frr-k8s-hs7fd\" (UID: \"84543eb2-59d1-4d2c-986c-5836e23a4a68\") " pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.432475 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zddxf\" (UniqueName: \"kubernetes.io/projected/148080b1-7775-4b6c-ad08-5de4695051bb-kube-api-access-zddxf\") pod \"frr-k8s-webhook-server-5478bdb765-j47xm\" (UID: \"148080b1-7775-4b6c-ad08-5de4695051bb\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.483080 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.495573 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.512950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b474caaa-981c-427a-890c-aab91f461a90-metallb-excludel2\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.513001 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rjmn\" (UniqueName: \"kubernetes.io/projected/c10a1808-8263-4c3f-8a8c-dc22de262eee-kube-api-access-4rjmn\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.513043 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.513067 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdshz\" (UniqueName: \"kubernetes.io/projected/b474caaa-981c-427a-890c-aab91f461a90-kube-api-access-hdshz\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.513109 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c10a1808-8263-4c3f-8a8c-dc22de262eee-cert\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.513129 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c10a1808-8263-4c3f-8a8c-dc22de262eee-metrics-certs\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.513174 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-metrics-certs\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.513754 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b474caaa-981c-427a-890c-aab91f461a90-metallb-excludel2\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: E0929 09:42:35.514293 4779 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 09:42:35 crc kubenswrapper[4779]: E0929 09:42:35.514438 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist podName:b474caaa-981c-427a-890c-aab91f461a90 nodeName:}" failed. 
No retries permitted until 2025-09-29 09:42:36.01441619 +0000 UTC m=+787.995740184 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist") pod "speaker-n857k" (UID: "b474caaa-981c-427a-890c-aab91f461a90") : secret "metallb-memberlist" not found Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.517729 4779 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.518611 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c10a1808-8263-4c3f-8a8c-dc22de262eee-metrics-certs\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.520599 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-metrics-certs\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.526525 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c10a1808-8263-4c3f-8a8c-dc22de262eee-cert\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.529451 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rjmn\" (UniqueName: \"kubernetes.io/projected/c10a1808-8263-4c3f-8a8c-dc22de262eee-kube-api-access-4rjmn\") pod \"controller-5d688f5ffc-l6gxt\" (UID: \"c10a1808-8263-4c3f-8a8c-dc22de262eee\") " pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.530418 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdshz\" (UniqueName: \"kubernetes.io/projected/b474caaa-981c-427a-890c-aab91f461a90-kube-api-access-hdshz\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.582534 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:35 crc kubenswrapper[4779]: I0929 09:42:35.865253 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm"] Sep 29 09:42:35 crc kubenswrapper[4779]: W0929 09:42:35.870632 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod148080b1_7775_4b6c_ad08_5de4695051bb.slice/crio-113bdffdce5b0242348bb0fda60d047570b7e685c60e1e7776f0dbc2c136902d WatchSource:0}: Error finding container 113bdffdce5b0242348bb0fda60d047570b7e685c60e1e7776f0dbc2c136902d: Status 404 returned error can't find the container with id 113bdffdce5b0242348bb0fda60d047570b7e685c60e1e7776f0dbc2c136902d Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.001439 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerStarted","Data":"ea081a882e2bd481fa2b1460a41d4269e9c7dafb22b532f0637be3bfff7e7f28"} Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.002202 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" event={"ID":"148080b1-7775-4b6c-ad08-5de4695051bb","Type":"ContainerStarted","Data":"113bdffdce5b0242348bb0fda60d047570b7e685c60e1e7776f0dbc2c136902d"} Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.019797 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:36 crc kubenswrapper[4779]: E0929 09:42:36.020182 4779 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 09:42:36 crc kubenswrapper[4779]: E0929 09:42:36.020307 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist podName:b474caaa-981c-427a-890c-aab91f461a90 nodeName:}" failed. No retries permitted until 2025-09-29 09:42:37.020279381 +0000 UTC m=+789.001603295 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist") pod "speaker-n857k" (UID: "b474caaa-981c-427a-890c-aab91f461a90") : secret "metallb-memberlist" not found Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.030552 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-l6gxt"] Sep 29 09:42:36 crc kubenswrapper[4779]: W0929 09:42:36.039056 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc10a1808_8263_4c3f_8a8c_dc22de262eee.slice/crio-729146f6634611a416b2d1dc8fca2d0d0cbb688480c3b337216a904d88e15f30 WatchSource:0}: Error finding container 729146f6634611a416b2d1dc8fca2d0d0cbb688480c3b337216a904d88e15f30: Status 404 returned error can't find the container with id 729146f6634611a416b2d1dc8fca2d0d0cbb688480c3b337216a904d88e15f30 Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.461750 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g5rrg"] Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.463299 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.473815 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g5rrg"] Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.627875 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-utilities\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.628017 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-catalog-content\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.628206 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rglcl\" (UniqueName: \"kubernetes.io/projected/8173a109-e4e9-45f7-926a-1e27a50818f8-kube-api-access-rglcl\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.729417 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rglcl\" (UniqueName: \"kubernetes.io/projected/8173a109-e4e9-45f7-926a-1e27a50818f8-kube-api-access-rglcl\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.729539 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-utilities\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc 
kubenswrapper[4779]: I0929 09:42:36.729575 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-catalog-content\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.730039 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-utilities\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.730380 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-catalog-content\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.753392 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rglcl\" (UniqueName: \"kubernetes.io/projected/8173a109-e4e9-45f7-926a-1e27a50818f8-kube-api-access-rglcl\") pod \"community-operators-g5rrg\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:36 crc kubenswrapper[4779]: I0929 09:42:36.791566 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.014137 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-l6gxt" event={"ID":"c10a1808-8263-4c3f-8a8c-dc22de262eee","Type":"ContainerStarted","Data":"543d1a16574093352b508d8466617a1a307422c56632d0b4df016c2c2b8e9d44"} Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.014445 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-l6gxt" event={"ID":"c10a1808-8263-4c3f-8a8c-dc22de262eee","Type":"ContainerStarted","Data":"418710f28f621dad64bfc87207f510d3b9196f46c38a44d4c6e19cbb9b1040dd"} Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.014461 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-l6gxt" event={"ID":"c10a1808-8263-4c3f-8a8c-dc22de262eee","Type":"ContainerStarted","Data":"729146f6634611a416b2d1dc8fca2d0d0cbb688480c3b337216a904d88e15f30"} Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.015357 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.033671 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.035372 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-l6gxt" podStartSLOduration=2.035360118 podStartE2EDuration="2.035360118s" podCreationTimestamp="2025-09-29 09:42:35 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:42:37.033051743 +0000 UTC m=+789.014375647" watchObservedRunningTime="2025-09-29 09:42:37.035360118 +0000 UTC m=+789.016684022" Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.039604 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b474caaa-981c-427a-890c-aab91f461a90-memberlist\") pod \"speaker-n857k\" (UID: \"b474caaa-981c-427a-890c-aab91f461a90\") " pod="metallb-system/speaker-n857k" Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.062345 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-n857k" Sep 29 09:42:37 crc kubenswrapper[4779]: I0929 09:42:37.323175 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g5rrg"] Sep 29 09:42:38 crc kubenswrapper[4779]: I0929 09:42:38.028691 4779 generic.go:334] "Generic (PLEG): container finished" podID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerID="1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de" exitCode=0 Sep 29 09:42:38 crc kubenswrapper[4779]: I0929 09:42:38.028752 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5rrg" event={"ID":"8173a109-e4e9-45f7-926a-1e27a50818f8","Type":"ContainerDied","Data":"1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de"} Sep 29 09:42:38 crc kubenswrapper[4779]: I0929 09:42:38.029059 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5rrg" event={"ID":"8173a109-e4e9-45f7-926a-1e27a50818f8","Type":"ContainerStarted","Data":"deda4dfb9f26a6654ace0742a1003dc3f6af74f3fbcd21fd53ed937a39daf91f"} Sep 29 09:42:38 crc kubenswrapper[4779]: I0929 09:42:38.034300 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-n857k" event={"ID":"b474caaa-981c-427a-890c-aab91f461a90","Type":"ContainerStarted","Data":"6703ab747b0fab719e71eb87bf103a3b45aee6aa3883972632ab6390ce28986b"} Sep 29 09:42:38 crc kubenswrapper[4779]: I0929 09:42:38.034339 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-n857k" event={"ID":"b474caaa-981c-427a-890c-aab91f461a90","Type":"ContainerStarted","Data":"94bb11ee33587e94d76a5f0bf95baf26689c23304bd39ad7b299a0aaed314b72"} Sep 29 09:42:38 crc kubenswrapper[4779]: I0929 09:42:38.034349 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-n857k" event={"ID":"b474caaa-981c-427a-890c-aab91f461a90","Type":"ContainerStarted","Data":"84794f5d439157ae4faef4fbcead2f2cc580072cdda4d66369b234f4d89fa9d0"} Sep 29 09:42:38 crc kubenswrapper[4779]: I0929 09:42:38.034604 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-n857k" Sep 29 09:42:38 crc kubenswrapper[4779]: I0929 09:42:38.082788 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-n857k" podStartSLOduration=3.082768166 podStartE2EDuration="3.082768166s" podCreationTimestamp="2025-09-29 09:42:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:42:38.079780363 +0000 UTC m=+790.061104267" watchObservedRunningTime="2025-09-29 09:42:38.082768166 +0000 UTC m=+790.064092070" Sep 29 09:42:39 crc kubenswrapper[4779]: I0929 
09:42:39.049502 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5rrg" event={"ID":"8173a109-e4e9-45f7-926a-1e27a50818f8","Type":"ContainerStarted","Data":"c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc"} Sep 29 09:42:40 crc kubenswrapper[4779]: I0929 09:42:40.057864 4779 generic.go:334] "Generic (PLEG): container finished" podID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerID="c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc" exitCode=0 Sep 29 09:42:40 crc kubenswrapper[4779]: I0929 09:42:40.057917 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5rrg" event={"ID":"8173a109-e4e9-45f7-926a-1e27a50818f8","Type":"ContainerDied","Data":"c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc"} Sep 29 09:42:43 crc kubenswrapper[4779]: I0929 09:42:43.090593 4779 generic.go:334] "Generic (PLEG): container finished" podID="84543eb2-59d1-4d2c-986c-5836e23a4a68" containerID="36ac46c0e0f1f0ae5a0f00d5cba1adb82b1d86a7e0ccfdb9955a738a0823b79f" exitCode=0 Sep 29 09:42:43 crc kubenswrapper[4779]: I0929 09:42:43.090663 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerDied","Data":"36ac46c0e0f1f0ae5a0f00d5cba1adb82b1d86a7e0ccfdb9955a738a0823b79f"} Sep 29 09:42:43 crc kubenswrapper[4779]: I0929 09:42:43.092496 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" event={"ID":"148080b1-7775-4b6c-ad08-5de4695051bb","Type":"ContainerStarted","Data":"ed41da4034bf2817dce9269a8d1edb875861f886f5cae8b4ee27f259d0b74aef"} Sep 29 09:42:43 crc kubenswrapper[4779]: I0929 09:42:43.092725 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:43 crc kubenswrapper[4779]: I0929 09:42:43.098959 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5rrg" event={"ID":"8173a109-e4e9-45f7-926a-1e27a50818f8","Type":"ContainerStarted","Data":"3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f"} Sep 29 09:42:43 crc kubenswrapper[4779]: I0929 09:42:43.147444 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g5rrg" podStartSLOduration=2.432219367 podStartE2EDuration="7.147379088s" podCreationTimestamp="2025-09-29 09:42:36 +0000 UTC" firstStartedPulling="2025-09-29 09:42:38.031874037 +0000 UTC m=+790.013197941" lastFinishedPulling="2025-09-29 09:42:42.747033758 +0000 UTC m=+794.728357662" observedRunningTime="2025-09-29 09:42:43.147154541 +0000 UTC m=+795.128478455" watchObservedRunningTime="2025-09-29 09:42:43.147379088 +0000 UTC m=+795.128703012" Sep 29 09:42:43 crc kubenswrapper[4779]: I0929 09:42:43.149816 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" podStartSLOduration=1.273952631 podStartE2EDuration="8.149805025s" podCreationTimestamp="2025-09-29 09:42:35 +0000 UTC" firstStartedPulling="2025-09-29 09:42:35.872783719 +0000 UTC m=+787.854107663" lastFinishedPulling="2025-09-29 09:42:42.748636153 +0000 UTC m=+794.729960057" observedRunningTime="2025-09-29 09:42:43.127460212 +0000 UTC m=+795.108784156" watchObservedRunningTime="2025-09-29 09:42:43.149805025 +0000 UTC m=+795.131128929" Sep 29 09:42:44 crc 
kubenswrapper[4779]: I0929 09:42:44.109096 4779 generic.go:334] "Generic (PLEG): container finished" podID="84543eb2-59d1-4d2c-986c-5836e23a4a68" containerID="259ac819ea51fd45c8523ad38144afb572178d72f350caf6becb31c6ff04a45d" exitCode=0 Sep 29 09:42:44 crc kubenswrapper[4779]: I0929 09:42:44.109155 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerDied","Data":"259ac819ea51fd45c8523ad38144afb572178d72f350caf6becb31c6ff04a45d"} Sep 29 09:42:45 crc kubenswrapper[4779]: I0929 09:42:45.115922 4779 generic.go:334] "Generic (PLEG): container finished" podID="84543eb2-59d1-4d2c-986c-5836e23a4a68" containerID="2588e6a5bce7bd9d1e8634a624d2c1154ec58b3d59d975ceeee38b1410427a08" exitCode=0 Sep 29 09:42:45 crc kubenswrapper[4779]: I0929 09:42:45.115976 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerDied","Data":"2588e6a5bce7bd9d1e8634a624d2c1154ec58b3d59d975ceeee38b1410427a08"} Sep 29 09:42:46 crc kubenswrapper[4779]: I0929 09:42:46.792781 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:46 crc kubenswrapper[4779]: I0929 09:42:46.794539 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:46 crc kubenswrapper[4779]: I0929 09:42:46.848853 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:46 crc kubenswrapper[4779]: I0929 09:42:46.966383 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:42:46 crc kubenswrapper[4779]: I0929 09:42:46.966878 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:42:47 crc kubenswrapper[4779]: I0929 09:42:47.076357 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-n857k" Sep 29 09:42:47 crc kubenswrapper[4779]: I0929 09:42:47.132722 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerStarted","Data":"145e850a1c6c37ee26711791a95bb5be9fb699066c6bddf6cbceaacc194bb849"} Sep 29 09:42:47 crc kubenswrapper[4779]: I0929 09:42:47.132761 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerStarted","Data":"fff8ca87bbc294eb770716008b4d3779a6675852ef6a4a36a3ac713f4315c786"} Sep 29 09:42:47 crc kubenswrapper[4779]: I0929 09:42:47.132771 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerStarted","Data":"fe5767fe441066544300a8896a9b55ecb6123335d5e332ee68ddb4360ffd545f"} Sep 29 09:42:47 crc kubenswrapper[4779]: I0929 
09:42:47.132782 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerStarted","Data":"e1e6213fb61f98eeb024676334dcf6d5a676c0a5e3c7b798c49e0e7a4d931d10"} Sep 29 09:42:47 crc kubenswrapper[4779]: I0929 09:42:47.132792 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerStarted","Data":"5f637fb80c3c3f70a4af7a16ade4493e368f4040518cca883f9bd333a9f282e6"} Sep 29 09:42:47 crc kubenswrapper[4779]: I0929 09:42:47.185473 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:47 crc kubenswrapper[4779]: I0929 09:42:47.232166 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g5rrg"] Sep 29 09:42:48 crc kubenswrapper[4779]: I0929 09:42:48.142052 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hs7fd" event={"ID":"84543eb2-59d1-4d2c-986c-5836e23a4a68","Type":"ContainerStarted","Data":"e8e23d21d82580a2c8dffccaa2b60efebd6696dddb731006c7d8b3b037b0fbb0"} Sep 29 09:42:48 crc kubenswrapper[4779]: I0929 09:42:48.143513 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:48 crc kubenswrapper[4779]: I0929 09:42:48.169122 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-hs7fd" podStartSLOduration=6.035444716 podStartE2EDuration="13.169105466s" podCreationTimestamp="2025-09-29 09:42:35 +0000 UTC" firstStartedPulling="2025-09-29 09:42:35.660837982 +0000 UTC m=+787.642161876" lastFinishedPulling="2025-09-29 09:42:42.794498722 +0000 UTC m=+794.775822626" observedRunningTime="2025-09-29 09:42:48.165382492 +0000 UTC m=+800.146706406" watchObservedRunningTime="2025-09-29 09:42:48.169105466 +0000 UTC m=+800.150429380" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.148184 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g5rrg" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerName="registry-server" containerID="cri-o://3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f" gracePeriod=2 Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.569187 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.703754 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rglcl\" (UniqueName: \"kubernetes.io/projected/8173a109-e4e9-45f7-926a-1e27a50818f8-kube-api-access-rglcl\") pod \"8173a109-e4e9-45f7-926a-1e27a50818f8\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.703815 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-utilities\") pod \"8173a109-e4e9-45f7-926a-1e27a50818f8\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.703870 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-catalog-content\") pod \"8173a109-e4e9-45f7-926a-1e27a50818f8\" (UID: \"8173a109-e4e9-45f7-926a-1e27a50818f8\") " Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.704884 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-utilities" (OuterVolumeSpecName: "utilities") pod "8173a109-e4e9-45f7-926a-1e27a50818f8" (UID: "8173a109-e4e9-45f7-926a-1e27a50818f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.718607 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8173a109-e4e9-45f7-926a-1e27a50818f8-kube-api-access-rglcl" (OuterVolumeSpecName: "kube-api-access-rglcl") pod "8173a109-e4e9-45f7-926a-1e27a50818f8" (UID: "8173a109-e4e9-45f7-926a-1e27a50818f8"). InnerVolumeSpecName "kube-api-access-rglcl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.804928 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rglcl\" (UniqueName: \"kubernetes.io/projected/8173a109-e4e9-45f7-926a-1e27a50818f8-kube-api-access-rglcl\") on node \"crc\" DevicePath \"\"" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.805146 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.894962 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cfnz8"] Sep 29 09:42:49 crc kubenswrapper[4779]: E0929 09:42:49.895213 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerName="registry-server" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.895229 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerName="registry-server" Sep 29 09:42:49 crc kubenswrapper[4779]: E0929 09:42:49.895252 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerName="extract-content" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.895259 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerName="extract-content" Sep 29 09:42:49 crc kubenswrapper[4779]: E0929 09:42:49.895267 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerName="extract-utilities" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.895273 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerName="extract-utilities" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.895382 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerName="registry-server" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.896150 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:49 crc kubenswrapper[4779]: I0929 09:42:49.913080 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cfnz8"] Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.007737 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-utilities\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.007790 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-catalog-content\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.007882 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj74g\" (UniqueName: \"kubernetes.io/projected/ecb3b568-1d84-4314-bd21-4f1f3d861d03-kube-api-access-kj74g\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.108808 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-catalog-content\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.108897 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj74g\" (UniqueName: \"kubernetes.io/projected/ecb3b568-1d84-4314-bd21-4f1f3d861d03-kube-api-access-kj74g\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.108965 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-utilities\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.109279 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-catalog-content\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.109299 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-utilities\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.127243 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8173a109-e4e9-45f7-926a-1e27a50818f8" (UID: "8173a109-e4e9-45f7-926a-1e27a50818f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.128893 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj74g\" (UniqueName: \"kubernetes.io/projected/ecb3b568-1d84-4314-bd21-4f1f3d861d03-kube-api-access-kj74g\") pod \"redhat-operators-cfnz8\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") " pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.157352 4779 generic.go:334] "Generic (PLEG): container finished" podID="8173a109-e4e9-45f7-926a-1e27a50818f8" containerID="3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f" exitCode=0 Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.157409 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5rrg" event={"ID":"8173a109-e4e9-45f7-926a-1e27a50818f8","Type":"ContainerDied","Data":"3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f"} Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.157449 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g5rrg" event={"ID":"8173a109-e4e9-45f7-926a-1e27a50818f8","Type":"ContainerDied","Data":"deda4dfb9f26a6654ace0742a1003dc3f6af74f3fbcd21fd53ed937a39daf91f"} Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.157460 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g5rrg" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.157472 4779 scope.go:117] "RemoveContainer" containerID="3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.188462 4779 scope.go:117] "RemoveContainer" containerID="c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.188619 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g5rrg"] Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.193539 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g5rrg"] Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.206993 4779 scope.go:117] "RemoveContainer" containerID="1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.212373 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8173a109-e4e9-45f7-926a-1e27a50818f8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.218440 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cfnz8" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.225072 4779 scope.go:117] "RemoveContainer" containerID="3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f" Sep 29 09:42:50 crc kubenswrapper[4779]: E0929 09:42:50.225816 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f\": container with ID starting with 3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f not found: ID does not exist" containerID="3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.225859 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f"} err="failed to get container status \"3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f\": rpc error: code = NotFound desc = could not find container \"3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f\": container with ID starting with 3d60bf8c3f3a52498c20116fcb1f73397b6dbac97b2dde941f3df48b45eb048f not found: ID does not exist" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.225980 4779 scope.go:117] "RemoveContainer" containerID="c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc" Sep 29 09:42:50 crc kubenswrapper[4779]: E0929 09:42:50.226451 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc\": container with ID starting with c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc not found: ID does not exist" containerID="c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.226522 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc"} err="failed to get container status \"c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc\": rpc error: code = NotFound desc = could not find container \"c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc\": container with ID starting with c5b28a6f4b0d1ffef930051205557900164b57758022bb31459232fd75fb44dc not found: ID does not exist" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.226559 4779 scope.go:117] "RemoveContainer" containerID="1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de" Sep 29 09:42:50 crc kubenswrapper[4779]: E0929 09:42:50.227016 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de\": container with ID starting with 1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de not found: ID does not exist" containerID="1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.227059 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de"} err="failed to get container status \"1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de\": rpc error: code = 
NotFound desc = could not find container \"1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de\": container with ID starting with 1438c5695780540a65108d528970cef3305c9b1bd6af34759c2bee81bc7264de not found: ID does not exist" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.497084 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.538087 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-hs7fd" Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.707518 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cfnz8"] Sep 29 09:42:50 crc kubenswrapper[4779]: I0929 09:42:50.726825 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8173a109-e4e9-45f7-926a-1e27a50818f8" path="/var/lib/kubelet/pods/8173a109-e4e9-45f7-926a-1e27a50818f8/volumes" Sep 29 09:42:51 crc kubenswrapper[4779]: I0929 09:42:51.165172 4779 generic.go:334] "Generic (PLEG): container finished" podID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerID="1829085d94c688bcb0bb756ce5df5a56d30b100159f0a0ff1f25bec59d012106" exitCode=0 Sep 29 09:42:51 crc kubenswrapper[4779]: I0929 09:42:51.165236 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfnz8" event={"ID":"ecb3b568-1d84-4314-bd21-4f1f3d861d03","Type":"ContainerDied","Data":"1829085d94c688bcb0bb756ce5df5a56d30b100159f0a0ff1f25bec59d012106"} Sep 29 09:42:51 crc kubenswrapper[4779]: I0929 09:42:51.165264 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfnz8" event={"ID":"ecb3b568-1d84-4314-bd21-4f1f3d861d03","Type":"ContainerStarted","Data":"5cb62996983a0f4abfea927448e169fb9aa78c9fd8f67fde82b312c36e452219"} Sep 29 09:42:52 crc kubenswrapper[4779]: I0929 09:42:52.172551 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfnz8" event={"ID":"ecb3b568-1d84-4314-bd21-4f1f3d861d03","Type":"ContainerStarted","Data":"095404260ab278e337d3123b15deaa743b851d0eea348360dff5bb8ccf4810f9"} Sep 29 09:42:53 crc kubenswrapper[4779]: I0929 09:42:53.180576 4779 generic.go:334] "Generic (PLEG): container finished" podID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerID="095404260ab278e337d3123b15deaa743b851d0eea348360dff5bb8ccf4810f9" exitCode=0 Sep 29 09:42:53 crc kubenswrapper[4779]: I0929 09:42:53.180628 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfnz8" event={"ID":"ecb3b568-1d84-4314-bd21-4f1f3d861d03","Type":"ContainerDied","Data":"095404260ab278e337d3123b15deaa743b851d0eea348360dff5bb8ccf4810f9"} Sep 29 09:42:54 crc kubenswrapper[4779]: I0929 09:42:54.189099 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfnz8" event={"ID":"ecb3b568-1d84-4314-bd21-4f1f3d861d03","Type":"ContainerStarted","Data":"2961c5d7994d6321dee5696029fd82776b261957b4980f6859e2a56953c31df8"} Sep 29 09:42:54 crc kubenswrapper[4779]: I0929 09:42:54.205669 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cfnz8" podStartSLOduration=2.499875725 podStartE2EDuration="5.205653052s" podCreationTimestamp="2025-09-29 09:42:49 +0000 UTC" firstStartedPulling="2025-09-29 09:42:51.167444828 +0000 UTC m=+803.148768742" lastFinishedPulling="2025-09-29 09:42:53.873222165 
+0000 UTC m=+805.854546069" observedRunningTime="2025-09-29 09:42:54.204277354 +0000 UTC m=+806.185601268" watchObservedRunningTime="2025-09-29 09:42:54.205653052 +0000 UTC m=+806.186976956" Sep 29 09:42:55 crc kubenswrapper[4779]: I0929 09:42:55.489811 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-j47xm" Sep 29 09:42:55 crc kubenswrapper[4779]: I0929 09:42:55.586801 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-l6gxt" Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.498926 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-hj95j"] Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.501010 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-hj95j" Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.505366 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-pc74r" Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.512187 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-hj95j"] Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.512265 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.512605 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.613847 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6l8g\" (UniqueName: \"kubernetes.io/projected/8ead15f2-8f4d-49ff-bd74-a535f25bad67-kube-api-access-h6l8g\") pod \"openstack-operator-index-hj95j\" (UID: \"8ead15f2-8f4d-49ff-bd74-a535f25bad67\") " pod="openstack-operators/openstack-operator-index-hj95j" Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.714861 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6l8g\" (UniqueName: \"kubernetes.io/projected/8ead15f2-8f4d-49ff-bd74-a535f25bad67-kube-api-access-h6l8g\") pod \"openstack-operator-index-hj95j\" (UID: \"8ead15f2-8f4d-49ff-bd74-a535f25bad67\") " pod="openstack-operators/openstack-operator-index-hj95j" Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.738156 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6l8g\" (UniqueName: \"kubernetes.io/projected/8ead15f2-8f4d-49ff-bd74-a535f25bad67-kube-api-access-h6l8g\") pod \"openstack-operator-index-hj95j\" (UID: \"8ead15f2-8f4d-49ff-bd74-a535f25bad67\") " pod="openstack-operators/openstack-operator-index-hj95j" Sep 29 09:42:57 crc kubenswrapper[4779]: I0929 09:42:57.820190 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-hj95j"
Sep 29 09:42:58 crc kubenswrapper[4779]: I0929 09:42:58.236531 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-hj95j"]
Sep 29 09:42:58 crc kubenswrapper[4779]: W0929 09:42:58.238626 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ead15f2_8f4d_49ff_bd74_a535f25bad67.slice/crio-704fc0b0e72cf731716aa662cbcfe1bc50eb7d7381ff864c9e2794c75d62c80f WatchSource:0}: Error finding container 704fc0b0e72cf731716aa662cbcfe1bc50eb7d7381ff864c9e2794c75d62c80f: Status 404 returned error can't find the container with id 704fc0b0e72cf731716aa662cbcfe1bc50eb7d7381ff864c9e2794c75d62c80f
Sep 29 09:42:59 crc kubenswrapper[4779]: I0929 09:42:59.220099 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-hj95j" event={"ID":"8ead15f2-8f4d-49ff-bd74-a535f25bad67","Type":"ContainerStarted","Data":"704fc0b0e72cf731716aa662cbcfe1bc50eb7d7381ff864c9e2794c75d62c80f"}
Sep 29 09:43:00 crc kubenswrapper[4779]: I0929 09:43:00.220129 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cfnz8"
Sep 29 09:43:00 crc kubenswrapper[4779]: I0929 09:43:00.220187 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cfnz8"
Sep 29 09:43:00 crc kubenswrapper[4779]: I0929 09:43:00.273963 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cfnz8"
Sep 29 09:43:01 crc kubenswrapper[4779]: I0929 09:43:01.237811 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-hj95j" event={"ID":"8ead15f2-8f4d-49ff-bd74-a535f25bad67","Type":"ContainerStarted","Data":"0c2bc6eecfe4b9c11927fa8b90514eb95ee484be2c1a9fffb9b5e5df06d7e023"}
Sep 29 09:43:01 crc kubenswrapper[4779]: I0929 09:43:01.267721 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-hj95j" podStartSLOduration=1.852598481 podStartE2EDuration="4.267692896s" podCreationTimestamp="2025-09-29 09:42:57 +0000 UTC" firstStartedPulling="2025-09-29 09:42:58.241194678 +0000 UTC m=+810.222518582" lastFinishedPulling="2025-09-29 09:43:00.656289093 +0000 UTC m=+812.637612997" observedRunningTime="2025-09-29 09:43:01.259785815 +0000 UTC m=+813.241109739" watchObservedRunningTime="2025-09-29 09:43:01.267692896 +0000 UTC m=+813.249016840"
Sep 29 09:43:01 crc kubenswrapper[4779]: I0929 09:43:01.286479 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cfnz8"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.702149 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pbqjn"]
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.705052 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.731019 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pbqjn"]
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.796705 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwp2h\" (UniqueName: \"kubernetes.io/projected/a4cbe52e-7019-4d83-b902-1b3bad442f0b-kube-api-access-kwp2h\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.796899 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-catalog-content\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.797018 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-utilities\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.898728 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-catalog-content\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.898803 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-utilities\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.898824 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwp2h\" (UniqueName: \"kubernetes.io/projected/a4cbe52e-7019-4d83-b902-1b3bad442f0b-kube-api-access-kwp2h\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.899411 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-utilities\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.899406 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-catalog-content\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:02 crc kubenswrapper[4779]: I0929 09:43:02.916847 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwp2h\" (UniqueName: \"kubernetes.io/projected/a4cbe52e-7019-4d83-b902-1b3bad442f0b-kube-api-access-kwp2h\") pod \"certified-operators-pbqjn\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") " pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:03 crc kubenswrapper[4779]: I0929 09:43:03.028226 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:03 crc kubenswrapper[4779]: I0929 09:43:03.471037 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pbqjn"]
Sep 29 09:43:04 crc kubenswrapper[4779]: I0929 09:43:04.263310 4779 generic.go:334] "Generic (PLEG): container finished" podID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerID="253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd" exitCode=0
Sep 29 09:43:04 crc kubenswrapper[4779]: I0929 09:43:04.263394 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbqjn" event={"ID":"a4cbe52e-7019-4d83-b902-1b3bad442f0b","Type":"ContainerDied","Data":"253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd"}
Sep 29 09:43:04 crc kubenswrapper[4779]: I0929 09:43:04.263699 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbqjn" event={"ID":"a4cbe52e-7019-4d83-b902-1b3bad442f0b","Type":"ContainerStarted","Data":"26ccf574ca6e701f65fca654d372a321dc6d67155cb9d30448978c32a0e199c3"}
Sep 29 09:43:05 crc kubenswrapper[4779]: I0929 09:43:05.273551 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbqjn" event={"ID":"a4cbe52e-7019-4d83-b902-1b3bad442f0b","Type":"ContainerStarted","Data":"c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507"}
Sep 29 09:43:05 crc kubenswrapper[4779]: I0929 09:43:05.500239 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-hs7fd"
Sep 29 09:43:05 crc kubenswrapper[4779]: I0929 09:43:05.884821 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cfnz8"]
Sep 29 09:43:05 crc kubenswrapper[4779]: I0929 09:43:05.885337 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cfnz8" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerName="registry-server" containerID="cri-o://2961c5d7994d6321dee5696029fd82776b261957b4980f6859e2a56953c31df8" gracePeriod=2
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.285915 4779 generic.go:334] "Generic (PLEG): container finished" podID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerID="2961c5d7994d6321dee5696029fd82776b261957b4980f6859e2a56953c31df8" exitCode=0
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.285929 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfnz8" event={"ID":"ecb3b568-1d84-4314-bd21-4f1f3d861d03","Type":"ContainerDied","Data":"2961c5d7994d6321dee5696029fd82776b261957b4980f6859e2a56953c31df8"}
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.291403 4779 generic.go:334] "Generic (PLEG): container finished" podID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerID="c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507" exitCode=0
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.291450 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbqjn" event={"ID":"a4cbe52e-7019-4d83-b902-1b3bad442f0b","Type":"ContainerDied","Data":"c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507"}
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.349395 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cfnz8"
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.448011 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kj74g\" (UniqueName: \"kubernetes.io/projected/ecb3b568-1d84-4314-bd21-4f1f3d861d03-kube-api-access-kj74g\") pod \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") "
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.448179 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-utilities\") pod \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") "
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.448247 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-catalog-content\") pod \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\" (UID: \"ecb3b568-1d84-4314-bd21-4f1f3d861d03\") "
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.449090 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-utilities" (OuterVolumeSpecName: "utilities") pod "ecb3b568-1d84-4314-bd21-4f1f3d861d03" (UID: "ecb3b568-1d84-4314-bd21-4f1f3d861d03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.453665 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecb3b568-1d84-4314-bd21-4f1f3d861d03-kube-api-access-kj74g" (OuterVolumeSpecName: "kube-api-access-kj74g") pod "ecb3b568-1d84-4314-bd21-4f1f3d861d03" (UID: "ecb3b568-1d84-4314-bd21-4f1f3d861d03"). InnerVolumeSpecName "kube-api-access-kj74g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.524557 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ecb3b568-1d84-4314-bd21-4f1f3d861d03" (UID: "ecb3b568-1d84-4314-bd21-4f1f3d861d03"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.550303 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.550340 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecb3b568-1d84-4314-bd21-4f1f3d861d03-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 09:43:06 crc kubenswrapper[4779]: I0929 09:43:06.550355 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kj74g\" (UniqueName: \"kubernetes.io/projected/ecb3b568-1d84-4314-bd21-4f1f3d861d03-kube-api-access-kj74g\") on node \"crc\" DevicePath \"\""
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.301871 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfnz8" event={"ID":"ecb3b568-1d84-4314-bd21-4f1f3d861d03","Type":"ContainerDied","Data":"5cb62996983a0f4abfea927448e169fb9aa78c9fd8f67fde82b312c36e452219"}
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.301896 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cfnz8"
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.302392 4779 scope.go:117] "RemoveContainer" containerID="2961c5d7994d6321dee5696029fd82776b261957b4980f6859e2a56953c31df8"
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.305770 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbqjn" event={"ID":"a4cbe52e-7019-4d83-b902-1b3bad442f0b","Type":"ContainerStarted","Data":"c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b"}
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.321702 4779 scope.go:117] "RemoveContainer" containerID="095404260ab278e337d3123b15deaa743b851d0eea348360dff5bb8ccf4810f9"
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.333283 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pbqjn" podStartSLOduration=2.836974875 podStartE2EDuration="5.333259912s" podCreationTimestamp="2025-09-29 09:43:02 +0000 UTC" firstStartedPulling="2025-09-29 09:43:04.265489834 +0000 UTC m=+816.246813778" lastFinishedPulling="2025-09-29 09:43:06.761774911 +0000 UTC m=+818.743098815" observedRunningTime="2025-09-29 09:43:07.326577786 +0000 UTC m=+819.307901720" watchObservedRunningTime="2025-09-29 09:43:07.333259912 +0000 UTC m=+819.314583836"
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.350925 4779 scope.go:117] "RemoveContainer" containerID="1829085d94c688bcb0bb756ce5df5a56d30b100159f0a0ff1f25bec59d012106"
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.353714 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cfnz8"]
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.359694 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cfnz8"]
Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.821294 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-hj95j"
pod="openstack-operators/openstack-operator-index-hj95j" Sep 29 09:43:07 crc kubenswrapper[4779]: I0929 09:43:07.862318 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-hj95j" Sep 29 09:43:08 crc kubenswrapper[4779]: I0929 09:43:08.335199 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-hj95j" Sep 29 09:43:08 crc kubenswrapper[4779]: I0929 09:43:08.724887 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" path="/var/lib/kubelet/pods/ecb3b568-1d84-4314-bd21-4f1f3d861d03/volumes" Sep 29 09:43:11 crc kubenswrapper[4779]: I0929 09:43:11.943976 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl"] Sep 29 09:43:11 crc kubenswrapper[4779]: E0929 09:43:11.956266 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerName="registry-server" Sep 29 09:43:11 crc kubenswrapper[4779]: I0929 09:43:11.956290 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerName="registry-server" Sep 29 09:43:11 crc kubenswrapper[4779]: E0929 09:43:11.956321 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerName="extract-utilities" Sep 29 09:43:11 crc kubenswrapper[4779]: I0929 09:43:11.956332 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerName="extract-utilities" Sep 29 09:43:11 crc kubenswrapper[4779]: E0929 09:43:11.956351 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerName="extract-content" Sep 29 09:43:11 crc kubenswrapper[4779]: I0929 09:43:11.956360 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerName="extract-content" Sep 29 09:43:11 crc kubenswrapper[4779]: I0929 09:43:11.956515 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecb3b568-1d84-4314-bd21-4f1f3d861d03" containerName="registry-server" Sep 29 09:43:11 crc kubenswrapper[4779]: I0929 09:43:11.957596 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl"] Sep 29 09:43:11 crc kubenswrapper[4779]: I0929 09:43:11.957790 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:11 crc kubenswrapper[4779]: I0929 09:43:11.962390 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-wmhkn" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.147463 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-util\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.147600 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-bundle\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.147704 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qs84\" (UniqueName: \"kubernetes.io/projected/3c43da05-c4b7-4316-b3c0-464cc862dc74-kube-api-access-8qs84\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.249173 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-bundle\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.249658 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qs84\" (UniqueName: \"kubernetes.io/projected/3c43da05-c4b7-4316-b3c0-464cc862dc74-kube-api-access-8qs84\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.250031 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-util\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.250197 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-bundle\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " 
pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.250733 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-util\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.273987 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qs84\" (UniqueName: \"kubernetes.io/projected/3c43da05-c4b7-4316-b3c0-464cc862dc74-kube-api-access-8qs84\") pod \"bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.279633 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:12 crc kubenswrapper[4779]: I0929 09:43:12.738530 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl"] Sep 29 09:43:12 crc kubenswrapper[4779]: W0929 09:43:12.748250 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c43da05_c4b7_4316_b3c0_464cc862dc74.slice/crio-3b4bd590f41ae3c216dc373cc471b1326363959461ce2386f91f021b686644ae WatchSource:0}: Error finding container 3b4bd590f41ae3c216dc373cc471b1326363959461ce2386f91f021b686644ae: Status 404 returned error can't find the container with id 3b4bd590f41ae3c216dc373cc471b1326363959461ce2386f91f021b686644ae Sep 29 09:43:13 crc kubenswrapper[4779]: I0929 09:43:13.029007 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pbqjn" Sep 29 09:43:13 crc kubenswrapper[4779]: I0929 09:43:13.029326 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pbqjn" Sep 29 09:43:13 crc kubenswrapper[4779]: I0929 09:43:13.086874 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pbqjn" Sep 29 09:43:13 crc kubenswrapper[4779]: I0929 09:43:13.355283 4779 generic.go:334] "Generic (PLEG): container finished" podID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerID="8db5f3979ef1a2470d1e0b817ff92b9361636e415dff2e0cf1a199fcad2f97f6" exitCode=0 Sep 29 09:43:13 crc kubenswrapper[4779]: I0929 09:43:13.356082 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" event={"ID":"3c43da05-c4b7-4316-b3c0-464cc862dc74","Type":"ContainerDied","Data":"8db5f3979ef1a2470d1e0b817ff92b9361636e415dff2e0cf1a199fcad2f97f6"} Sep 29 09:43:13 crc kubenswrapper[4779]: I0929 09:43:13.356161 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" event={"ID":"3c43da05-c4b7-4316-b3c0-464cc862dc74","Type":"ContainerStarted","Data":"3b4bd590f41ae3c216dc373cc471b1326363959461ce2386f91f021b686644ae"} Sep 29 09:43:13 crc 
kubenswrapper[4779]: I0929 09:43:13.402670 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pbqjn" Sep 29 09:43:14 crc kubenswrapper[4779]: I0929 09:43:14.373158 4779 generic.go:334] "Generic (PLEG): container finished" podID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerID="df3ba1aa8f61dedf29bf30bfd08f963c51222be94b8a82cc7b56790b70650b44" exitCode=0 Sep 29 09:43:14 crc kubenswrapper[4779]: I0929 09:43:14.373244 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" event={"ID":"3c43da05-c4b7-4316-b3c0-464cc862dc74","Type":"ContainerDied","Data":"df3ba1aa8f61dedf29bf30bfd08f963c51222be94b8a82cc7b56790b70650b44"} Sep 29 09:43:15 crc kubenswrapper[4779]: I0929 09:43:15.379960 4779 generic.go:334] "Generic (PLEG): container finished" podID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerID="a17b73e3dd988f62587d62238d838dee79713df9d7a97251f945299c616b790e" exitCode=0 Sep 29 09:43:15 crc kubenswrapper[4779]: I0929 09:43:15.380008 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" event={"ID":"3c43da05-c4b7-4316-b3c0-464cc862dc74","Type":"ContainerDied","Data":"a17b73e3dd988f62587d62238d838dee79713df9d7a97251f945299c616b790e"} Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.672782 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.812509 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qs84\" (UniqueName: \"kubernetes.io/projected/3c43da05-c4b7-4316-b3c0-464cc862dc74-kube-api-access-8qs84\") pod \"3c43da05-c4b7-4316-b3c0-464cc862dc74\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.812598 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-bundle\") pod \"3c43da05-c4b7-4316-b3c0-464cc862dc74\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.812643 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-util\") pod \"3c43da05-c4b7-4316-b3c0-464cc862dc74\" (UID: \"3c43da05-c4b7-4316-b3c0-464cc862dc74\") " Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.813358 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-bundle" (OuterVolumeSpecName: "bundle") pod "3c43da05-c4b7-4316-b3c0-464cc862dc74" (UID: "3c43da05-c4b7-4316-b3c0-464cc862dc74"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.817830 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c43da05-c4b7-4316-b3c0-464cc862dc74-kube-api-access-8qs84" (OuterVolumeSpecName: "kube-api-access-8qs84") pod "3c43da05-c4b7-4316-b3c0-464cc862dc74" (UID: "3c43da05-c4b7-4316-b3c0-464cc862dc74"). InnerVolumeSpecName "kube-api-access-8qs84". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.829725 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-util" (OuterVolumeSpecName: "util") pod "3c43da05-c4b7-4316-b3c0-464cc862dc74" (UID: "3c43da05-c4b7-4316-b3c0-464cc862dc74"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.914474 4779 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-util\") on node \"crc\" DevicePath \"\"" Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.914510 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qs84\" (UniqueName: \"kubernetes.io/projected/3c43da05-c4b7-4316-b3c0-464cc862dc74-kube-api-access-8qs84\") on node \"crc\" DevicePath \"\"" Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.914524 4779 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c43da05-c4b7-4316-b3c0-464cc862dc74-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.966499 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:43:16 crc kubenswrapper[4779]: I0929 09:43:16.966564 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:43:17 crc kubenswrapper[4779]: I0929 09:43:17.399158 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" event={"ID":"3c43da05-c4b7-4316-b3c0-464cc862dc74","Type":"ContainerDied","Data":"3b4bd590f41ae3c216dc373cc471b1326363959461ce2386f91f021b686644ae"} Sep 29 09:43:17 crc kubenswrapper[4779]: I0929 09:43:17.399198 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b4bd590f41ae3c216dc373cc471b1326363959461ce2386f91f021b686644ae" Sep 29 09:43:17 crc kubenswrapper[4779]: I0929 09:43:17.399197 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl" Sep 29 09:43:19 crc kubenswrapper[4779]: I0929 09:43:19.493855 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pbqjn"] Sep 29 09:43:19 crc kubenswrapper[4779]: I0929 09:43:19.495567 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pbqjn" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerName="registry-server" containerID="cri-o://c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b" gracePeriod=2 Sep 29 09:43:19 crc kubenswrapper[4779]: I0929 09:43:19.991544 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 09:43:19 crc kubenswrapper[4779]: I0929 09:43:19.991544 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.063930 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-catalog-content\") pod \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") "
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.064008 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwp2h\" (UniqueName: \"kubernetes.io/projected/a4cbe52e-7019-4d83-b902-1b3bad442f0b-kube-api-access-kwp2h\") pod \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") "
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.064042 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-utilities\") pod \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\" (UID: \"a4cbe52e-7019-4d83-b902-1b3bad442f0b\") "
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.065209 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-utilities" (OuterVolumeSpecName: "utilities") pod "a4cbe52e-7019-4d83-b902-1b3bad442f0b" (UID: "a4cbe52e-7019-4d83-b902-1b3bad442f0b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.072642 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4cbe52e-7019-4d83-b902-1b3bad442f0b-kube-api-access-kwp2h" (OuterVolumeSpecName: "kube-api-access-kwp2h") pod "a4cbe52e-7019-4d83-b902-1b3bad442f0b" (UID: "a4cbe52e-7019-4d83-b902-1b3bad442f0b"). InnerVolumeSpecName "kube-api-access-kwp2h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.134525 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4cbe52e-7019-4d83-b902-1b3bad442f0b" (UID: "a4cbe52e-7019-4d83-b902-1b3bad442f0b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.165511 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.165546 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwp2h\" (UniqueName: \"kubernetes.io/projected/a4cbe52e-7019-4d83-b902-1b3bad442f0b-kube-api-access-kwp2h\") on node \"crc\" DevicePath \"\""
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.165561 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4cbe52e-7019-4d83-b902-1b3bad442f0b-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.425266 4779 generic.go:334] "Generic (PLEG): container finished" podID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerID="c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b" exitCode=0
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.425315 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbqjn" event={"ID":"a4cbe52e-7019-4d83-b902-1b3bad442f0b","Type":"ContainerDied","Data":"c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b"}
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.425352 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbqjn" event={"ID":"a4cbe52e-7019-4d83-b902-1b3bad442f0b","Type":"ContainerDied","Data":"26ccf574ca6e701f65fca654d372a321dc6d67155cb9d30448978c32a0e199c3"}
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.425377 4779 scope.go:117] "RemoveContainer" containerID="c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.425391 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pbqjn"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.461997 4779 scope.go:117] "RemoveContainer" containerID="c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.473504 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pbqjn"]
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.483843 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pbqjn"]
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.501921 4779 scope.go:117] "RemoveContainer" containerID="253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.519916 4779 scope.go:117] "RemoveContainer" containerID="c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b"
Sep 29 09:43:20 crc kubenswrapper[4779]: E0929 09:43:20.520247 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b\": container with ID starting with c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b not found: ID does not exist" containerID="c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.520272 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b"} err="failed to get container status \"c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b\": rpc error: code = NotFound desc = could not find container \"c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b\": container with ID starting with c37a88d280c52097f7b0c08eaae8d9ea8ed99f905a0099831e4a7273ae91295b not found: ID does not exist"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.520291 4779 scope.go:117] "RemoveContainer" containerID="c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507"
Sep 29 09:43:20 crc kubenswrapper[4779]: E0929 09:43:20.520640 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507\": container with ID starting with c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507 not found: ID does not exist" containerID="c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.520674 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507"} err="failed to get container status \"c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507\": rpc error: code = NotFound desc = could not find container \"c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507\": container with ID starting with c4b830dfb699bd7a8b06679df65fdf1d3257ac3f6c53acdef64f6d6b8bd2b507 not found: ID does not exist"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.520698 4779 scope.go:117] "RemoveContainer" containerID="253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd"
Sep 29 09:43:20 crc kubenswrapper[4779]: E0929 09:43:20.521022 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd\": container with ID starting with 253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd not found: ID does not exist" containerID="253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.521055 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd"} err="failed to get container status \"253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd\": rpc error: code = NotFound desc = could not find container \"253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd\": container with ID starting with 253b8d7aa01ede6beeb74cad76e4f3e84cfe0f64f127fc13a972b4ef936fe4bd not found: ID does not exist"
Sep 29 09:43:20 crc kubenswrapper[4779]: I0929 09:43:20.720923 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" path="/var/lib/kubelet/pods/a4cbe52e-7019-4d83-b902-1b3bad442f0b/volumes"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.258676 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"]
Sep 29 09:43:24 crc kubenswrapper[4779]: E0929 09:43:24.259203 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerName="extract-utilities"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.259214 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerName="extract-utilities"
Sep 29 09:43:24 crc kubenswrapper[4779]: E0929 09:43:24.259221 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerName="extract-content"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.259227 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerName="extract-content"
Sep 29 09:43:24 crc kubenswrapper[4779]: E0929 09:43:24.259238 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerName="extract"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.259244 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerName="extract"
Sep 29 09:43:24 crc kubenswrapper[4779]: E0929 09:43:24.259252 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerName="registry-server"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.259258 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerName="registry-server"
Sep 29 09:43:24 crc kubenswrapper[4779]: E0929 09:43:24.259264 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerName="util"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.259269 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerName="util"
Sep 29 09:43:24 crc kubenswrapper[4779]: E0929 09:43:24.259280 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerName="pull"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.259286 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerName="pull"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.259399 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4cbe52e-7019-4d83-b902-1b3bad442f0b" containerName="registry-server"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.259415 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c43da05-c4b7-4316-b3c0-464cc862dc74" containerName="extract"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.260063 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.262270 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-qlcn4"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.309046 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"]
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.424453 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bjfs\" (UniqueName: \"kubernetes.io/projected/6ed08027-8bec-43f6-a452-703eebf6792b-kube-api-access-4bjfs\") pod \"openstack-operator-controller-operator-6f5d85b94b-vc7f6\" (UID: \"6ed08027-8bec-43f6-a452-703eebf6792b\") " pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.525956 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bjfs\" (UniqueName: \"kubernetes.io/projected/6ed08027-8bec-43f6-a452-703eebf6792b-kube-api-access-4bjfs\") pod \"openstack-operator-controller-operator-6f5d85b94b-vc7f6\" (UID: \"6ed08027-8bec-43f6-a452-703eebf6792b\") " pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.545934 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bjfs\" (UniqueName: \"kubernetes.io/projected/6ed08027-8bec-43f6-a452-703eebf6792b-kube-api-access-4bjfs\") pod \"openstack-operator-controller-operator-6f5d85b94b-vc7f6\" (UID: \"6ed08027-8bec-43f6-a452-703eebf6792b\") " pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"
Sep 29 09:43:24 crc kubenswrapper[4779]: I0929 09:43:24.576739 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"
Sep 29 09:43:25 crc kubenswrapper[4779]: I0929 09:43:25.014580 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"]
Sep 29 09:43:25 crc kubenswrapper[4779]: I0929 09:43:25.457926 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6" event={"ID":"6ed08027-8bec-43f6-a452-703eebf6792b","Type":"ContainerStarted","Data":"85dd861a09a90169390088d94e6600ecdd215149c0a7a714748bbfeca4bd6cf1"}
Sep 29 09:43:29 crc kubenswrapper[4779]: I0929 09:43:29.483197 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6" event={"ID":"6ed08027-8bec-43f6-a452-703eebf6792b","Type":"ContainerStarted","Data":"82e4ed14894361f53732c2124e76c7c37635f873711cc65750b01db32f7bede9"}
Sep 29 09:43:31 crc kubenswrapper[4779]: I0929 09:43:31.495422 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6" event={"ID":"6ed08027-8bec-43f6-a452-703eebf6792b","Type":"ContainerStarted","Data":"4ada5955bacfbfafeb61ea2e355889371e5830cb376d1a915533de07676b7d7a"}
Sep 29 09:43:31 crc kubenswrapper[4779]: I0929 09:43:31.495790 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"
Sep 29 09:43:31 crc kubenswrapper[4779]: I0929 09:43:31.529760 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6" podStartSLOduration=2.009429685 podStartE2EDuration="7.52973541s" podCreationTimestamp="2025-09-29 09:43:24 +0000 UTC" firstStartedPulling="2025-09-29 09:43:25.023067788 +0000 UTC m=+837.004391692" lastFinishedPulling="2025-09-29 09:43:30.543373513 +0000 UTC m=+842.524697417" observedRunningTime="2025-09-29 09:43:31.520937944 +0000 UTC m=+843.502261848" watchObservedRunningTime="2025-09-29 09:43:31.52973541 +0000 UTC m=+843.511059354"
Sep 29 09:43:34 crc kubenswrapper[4779]: I0929 09:43:34.580398 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6f5d85b94b-vc7f6"
Sep 29 09:43:46 crc kubenswrapper[4779]: I0929 09:43:46.966514 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:43:46 crc kubenswrapper[4779]: I0929 09:43:46.967077 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:43:46 crc kubenswrapper[4779]: I0929 09:43:46.967118 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv"
Sep 29 09:43:46 crc kubenswrapper[4779]: I0929 09:43:46.967614 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0e0bb92440d884ad90defff16322d948e1df51b9f7349061e9da58a7e515a610"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 09:43:46 crc kubenswrapper[4779]: I0929 09:43:46.967661 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://0e0bb92440d884ad90defff16322d948e1df51b9f7349061e9da58a7e515a610" gracePeriod=600
Sep 29 09:43:47 crc kubenswrapper[4779]: I0929 09:43:47.602438 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="0e0bb92440d884ad90defff16322d948e1df51b9f7349061e9da58a7e515a610" exitCode=0
Sep 29 09:43:47 crc kubenswrapper[4779]: I0929 09:43:47.602482 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"0e0bb92440d884ad90defff16322d948e1df51b9f7349061e9da58a7e515a610"}
Sep 29 09:43:47 crc kubenswrapper[4779]: I0929 09:43:47.602813 4779 scope.go:117] "RemoveContainer" containerID="7390d6cd5471c2ba2b59c4030a2423a19ab8e39bfa71091617e29773167c149f"
Sep 29 09:43:48 crc kubenswrapper[4779]: I0929 09:43:48.612802 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"fcd55a765537b74d7fe03acdaa880fd723f800adde7aab67d7d2e84cbd82c102"}
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.112749 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.114579 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.117862 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-4r8kn"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.119131 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.120363 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.125420 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-9tbsg"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.130820 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.136228 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.144718 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.146232 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.148967 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.149852 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.152198 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-fgcnm"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.152654 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-gcp2w"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.157591 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt2j6\" (UniqueName: \"kubernetes.io/projected/02af413e-f4cb-413e-b029-5410ad2bb9ab-kube-api-access-tt2j6\") pod \"barbican-operator-controller-manager-6495d75b5-dwvhq\" (UID: \"02af413e-f4cb-413e-b029-5410ad2bb9ab\") " pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.157636 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c885v\" (UniqueName: \"kubernetes.io/projected/a5133455-fda2-4b98-9465-8421aae72e9c-kube-api-access-c885v\") pod \"cinder-operator-controller-manager-748c574d75-cssz9\" (UID: \"a5133455-fda2-4b98-9465-8421aae72e9c\") " pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.157678 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8wsm\" (UniqueName: \"kubernetes.io/projected/3ebfd5a9-ed4f-4589-900e-0c44346fece4-kube-api-access-w8wsm\") pod \"glance-operator-controller-manager-67b5d44b7f-6r9tp\" (UID: \"3ebfd5a9-ed4f-4589-900e-0c44346fece4\") " pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.157750 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsbxt\" (UniqueName: \"kubernetes.io/projected/01b3e1fb-fbfd-48c6-a89f-cc347df3a24b-kube-api-access-rsbxt\") pod \"designate-operator-controller-manager-7d74f4d695-jdvhr\" (UID: \"01b3e1fb-fbfd-48c6-a89f-cc347df3a24b\") " pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.174780 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.183869 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.197437 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.199055 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.205352 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-8s49p"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.222004 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.223410 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.232582 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-64tk9"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.244557 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.272409 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsbxt\" (UniqueName: \"kubernetes.io/projected/01b3e1fb-fbfd-48c6-a89f-cc347df3a24b-kube-api-access-rsbxt\") pod \"designate-operator-controller-manager-7d74f4d695-jdvhr\" (UID: \"01b3e1fb-fbfd-48c6-a89f-cc347df3a24b\") " pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.272513 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt2j6\" (UniqueName: \"kubernetes.io/projected/02af413e-f4cb-413e-b029-5410ad2bb9ab-kube-api-access-tt2j6\") pod \"barbican-operator-controller-manager-6495d75b5-dwvhq\" (UID: \"02af413e-f4cb-413e-b029-5410ad2bb9ab\") " pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.272541 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c885v\" (UniqueName: \"kubernetes.io/projected/a5133455-fda2-4b98-9465-8421aae72e9c-kube-api-access-c885v\") pod \"cinder-operator-controller-manager-748c574d75-cssz9\" (UID: \"a5133455-fda2-4b98-9465-8421aae72e9c\") " pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.272607 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8wsm\" (UniqueName: \"kubernetes.io/projected/3ebfd5a9-ed4f-4589-900e-0c44346fece4-kube-api-access-w8wsm\") pod \"glance-operator-controller-manager-67b5d44b7f-6r9tp\" (UID: \"3ebfd5a9-ed4f-4589-900e-0c44346fece4\") " pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.272658 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86p25\" (UniqueName: \"kubernetes.io/projected/618315d7-82b7-469d-ba71-a3fbb71ae08c-kube-api-access-86p25\") pod \"heat-operator-controller-manager-8ff95898-z2ltj\" (UID: \"618315d7-82b7-469d-ba71-a3fbb71ae08c\") " pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.281426 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.298953 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.300942 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.315719 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt2j6\" (UniqueName: \"kubernetes.io/projected/02af413e-f4cb-413e-b029-5410ad2bb9ab-kube-api-access-tt2j6\") pod \"barbican-operator-controller-manager-6495d75b5-dwvhq\" (UID: \"02af413e-f4cb-413e-b029-5410ad2bb9ab\") " pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.315942 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-z8ftm"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.315949 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.315982 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8wsm\" (UniqueName: \"kubernetes.io/projected/3ebfd5a9-ed4f-4589-900e-0c44346fece4-kube-api-access-w8wsm\") pod \"glance-operator-controller-manager-67b5d44b7f-6r9tp\" (UID: \"3ebfd5a9-ed4f-4589-900e-0c44346fece4\") " pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.317760 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.330882 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.332238 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.332915 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsbxt\" (UniqueName: \"kubernetes.io/projected/01b3e1fb-fbfd-48c6-a89f-cc347df3a24b-kube-api-access-rsbxt\") pod \"designate-operator-controller-manager-7d74f4d695-jdvhr\" (UID: \"01b3e1fb-fbfd-48c6-a89f-cc347df3a24b\") " pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.341586 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-shwbw"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.349962 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.351107 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.353221 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c885v\" (UniqueName: \"kubernetes.io/projected/a5133455-fda2-4b98-9465-8421aae72e9c-kube-api-access-c885v\") pod \"cinder-operator-controller-manager-748c574d75-cssz9\" (UID: \"a5133455-fda2-4b98-9465-8421aae72e9c\") " pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.354815 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-mzg59"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.366403 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.369992 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.371384 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.374505 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-9779w"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.400324 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86p25\" (UniqueName: \"kubernetes.io/projected/618315d7-82b7-469d-ba71-a3fbb71ae08c-kube-api-access-86p25\") pod \"heat-operator-controller-manager-8ff95898-z2ltj\" (UID: \"618315d7-82b7-469d-ba71-a3fbb71ae08c\") " pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.400386 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22gph\" (UniqueName: \"kubernetes.io/projected/c9015e54-4a8c-4d07-ae64-74c380a50a22-kube-api-access-22gph\") pod \"horizon-operator-controller-manager-695847bc78-gnhwx\" (UID: \"c9015e54-4a8c-4d07-ae64-74c380a50a22\") " pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.401347 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.411869 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.417738 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86p25\" (UniqueName: \"kubernetes.io/projected/618315d7-82b7-469d-ba71-a3fbb71ae08c-kube-api-access-86p25\") pod \"heat-operator-controller-manager-8ff95898-z2ltj\" (UID: \"618315d7-82b7-469d-ba71-a3fbb71ae08c\") " pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.445129 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.446143 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.449015 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.456034 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-c4b8t"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.456151 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.457544 4779 util.go:30] "No sandbox for pod can be found.
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.459937 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-2vpzz"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.462422 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.472033 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.479681 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.480825 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.483590 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-4ppkn"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.494867 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.503246 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22gph\" (UniqueName: \"kubernetes.io/projected/c9015e54-4a8c-4d07-ae64-74c380a50a22-kube-api-access-22gph\") pod \"horizon-operator-controller-manager-695847bc78-gnhwx\" (UID: \"c9015e54-4a8c-4d07-ae64-74c380a50a22\") " pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.503301 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft8cg\" (UniqueName: \"kubernetes.io/projected/31462d03-e504-4493-af67-3a5bc9eee5f7-kube-api-access-ft8cg\") pod \"ironic-operator-controller-manager-9fc8d5567-xxc4j\" (UID: \"31462d03-e504-4493-af67-3a5bc9eee5f7\") " pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.503346 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert\") pod \"infra-operator-controller-manager-858cd69f49-7pldk\" (UID: \"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.503373 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpzvk\" (UniqueName: \"kubernetes.io/projected/d2ec9063-4ca4-4280-b98c-198da389f005-kube-api-access-cpzvk\") pod \"keystone-operator-controller-manager-7bf498966c-8fwmh\" (UID: \"d2ec9063-4ca4-4280-b98c-198da389f005\") " pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.503431 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz29n\" (UniqueName: \"kubernetes.io/projected/ad0344fd-e85f-41bc-88da-d38d5ce5add8-kube-api-access-rz29n\") pod \"manila-operator-controller-manager-56cf9c6b99-ll5vc\" (UID: \"ad0344fd-e85f-41bc-88da-d38d5ce5add8\") " pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.503456 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcz87\" (UniqueName: \"kubernetes.io/projected/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-kube-api-access-pcz87\") pod \"infra-operator-controller-manager-858cd69f49-7pldk\" (UID: \"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.509321 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.514322 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.519186 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.520403 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.522370 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.522583 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-8h45g"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.522370 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22gph\" (UniqueName: \"kubernetes.io/projected/c9015e54-4a8c-4d07-ae64-74c380a50a22-kube-api-access-22gph\") pod \"horizon-operator-controller-manager-695847bc78-gnhwx\" (UID: \"c9015e54-4a8c-4d07-ae64-74c380a50a22\") " pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.527823 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.531976 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.534022 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.536396 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-wpxf5"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.536588 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.543412 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.544409 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.546701 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.547476 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-sbw9k"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.549361 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.550277 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.551657 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-9cvbx"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.557641 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.561605 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.561922 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.574954 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.575128 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2877g"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.581438 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.593380 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.599314 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.609844 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p84wj\" (UniqueName: \"kubernetes.io/projected/37f9c445-f9e3-47a6-9cc5-63133c13e09f-kube-api-access-p84wj\") pod \"placement-operator-controller-manager-774b97b48-pzd6k\" (UID: \"37f9c445-f9e3-47a6-9cc5-63133c13e09f\") " pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.609885 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77pgv\" (UniqueName: \"kubernetes.io/projected/2bf3c066-8608-4d04-8c2a-7570b23edebe-kube-api-access-77pgv\") pod \"mariadb-operator-controller-manager-687b9cf756-d7n87\" (UID: \"2bf3c066-8608-4d04-8c2a-7570b23edebe\") " pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.609944 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmzfw\" (UniqueName: \"kubernetes.io/projected/902219f3-1427-4cf0-9b3f-5879caf3e30d-kube-api-access-rmzfw\") pod \"nova-operator-controller-manager-c7c776c96-b86j5\" (UID: \"902219f3-1427-4cf0-9b3f-5879caf3e30d\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.609973 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66jxb\" (UniqueName: \"kubernetes.io/projected/fd02f073-8084-4117-b444-292a0e41e629-kube-api-access-66jxb\") pod \"octavia-operator-controller-manager-76fcc6dc7c-6rzg7\" (UID: \"fd02f073-8084-4117-b444-292a0e41e629\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610006 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7731544d-11b0-44ef-8a11-163cd6e9cb53-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-g6nvc\" (UID: \"7731544d-11b0-44ef-8a11-163cd6e9cb53\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610026 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-522pw\" (UniqueName: \"kubernetes.io/projected/fa1b2d18-b1e3-410e-864f-84c2d892474a-kube-api-access-522pw\") pod \"neutron-operator-controller-manager-54d766c9f9-lbx77\" (UID: \"fa1b2d18-b1e3-410e-864f-84c2d892474a\") " pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77"
\"kubernetes.io/projected/fa1b2d18-b1e3-410e-864f-84c2d892474a-kube-api-access-522pw\") pod \"neutron-operator-controller-manager-54d766c9f9-lbx77\" (UID: \"fa1b2d18-b1e3-410e-864f-84c2d892474a\") " pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610067 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft8cg\" (UniqueName: \"kubernetes.io/projected/31462d03-e504-4493-af67-3a5bc9eee5f7-kube-api-access-ft8cg\") pod \"ironic-operator-controller-manager-9fc8d5567-xxc4j\" (UID: \"31462d03-e504-4493-af67-3a5bc9eee5f7\") " pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610104 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert\") pod \"infra-operator-controller-manager-858cd69f49-7pldk\" (UID: \"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpzvk\" (UniqueName: \"kubernetes.io/projected/d2ec9063-4ca4-4280-b98c-198da389f005-kube-api-access-cpzvk\") pod \"keystone-operator-controller-manager-7bf498966c-8fwmh\" (UID: \"d2ec9063-4ca4-4280-b98c-198da389f005\") " pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610183 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qjpj\" (UniqueName: \"kubernetes.io/projected/2520e0fa-6109-4297-b41b-ff1de862f6a1-kube-api-access-4qjpj\") pod \"swift-operator-controller-manager-bc7dc7bd9-2zb2m\" (UID: \"2520e0fa-6109-4297-b41b-ff1de862f6a1\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610226 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8dwl\" (UniqueName: \"kubernetes.io/projected/a89433a3-b8b0-4c71-ad8e-32dd617dc69e-kube-api-access-x8dwl\") pod \"ovn-operator-controller-manager-5f95c46c78-7vw66\" (UID: \"a89433a3-b8b0-4c71-ad8e-32dd617dc69e\") " pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610247 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jrfh\" (UniqueName: \"kubernetes.io/projected/7731544d-11b0-44ef-8a11-163cd6e9cb53-kube-api-access-7jrfh\") pod \"openstack-baremetal-operator-controller-manager-6d776955-g6nvc\" (UID: \"7731544d-11b0-44ef-8a11-163cd6e9cb53\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" Sep 29 09:44:09 crc kubenswrapper[4779]: E0929 09:44:09.610370 4779 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 09:44:09 crc kubenswrapper[4779]: E0929 09:44:09.610435 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert podName:a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c nodeName:}" failed. 
No retries permitted until 2025-09-29 09:44:10.11041444 +0000 UTC m=+882.091738454 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert") pod "infra-operator-controller-manager-858cd69f49-7pldk" (UID: "a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c") : secret "infra-operator-webhook-server-cert" not found Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610672 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz29n\" (UniqueName: \"kubernetes.io/projected/ad0344fd-e85f-41bc-88da-d38d5ce5add8-kube-api-access-rz29n\") pod \"manila-operator-controller-manager-56cf9c6b99-ll5vc\" (UID: \"ad0344fd-e85f-41bc-88da-d38d5ce5add8\") " pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.610693 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcz87\" (UniqueName: \"kubernetes.io/projected/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-kube-api-access-pcz87\") pod \"infra-operator-controller-manager-858cd69f49-7pldk\" (UID: \"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.636859 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpzvk\" (UniqueName: \"kubernetes.io/projected/d2ec9063-4ca4-4280-b98c-198da389f005-kube-api-access-cpzvk\") pod \"keystone-operator-controller-manager-7bf498966c-8fwmh\" (UID: \"d2ec9063-4ca4-4280-b98c-198da389f005\") " pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.639415 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft8cg\" (UniqueName: \"kubernetes.io/projected/31462d03-e504-4493-af67-3a5bc9eee5f7-kube-api-access-ft8cg\") pod \"ironic-operator-controller-manager-9fc8d5567-xxc4j\" (UID: \"31462d03-e504-4493-af67-3a5bc9eee5f7\") " pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.640581 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz29n\" (UniqueName: \"kubernetes.io/projected/ad0344fd-e85f-41bc-88da-d38d5ce5add8-kube-api-access-rz29n\") pod \"manila-operator-controller-manager-56cf9c6b99-ll5vc\" (UID: \"ad0344fd-e85f-41bc-88da-d38d5ce5add8\") " pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.647820 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8"] Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.650975 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcz87\" (UniqueName: \"kubernetes.io/projected/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-kube-api-access-pcz87\") pod \"infra-operator-controller-manager-858cd69f49-7pldk\" (UID: \"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.684861 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.696344 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-4gqf5" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714100 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qjpj\" (UniqueName: \"kubernetes.io/projected/2520e0fa-6109-4297-b41b-ff1de862f6a1-kube-api-access-4qjpj\") pod \"swift-operator-controller-manager-bc7dc7bd9-2zb2m\" (UID: \"2520e0fa-6109-4297-b41b-ff1de862f6a1\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714258 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8dwl\" (UniqueName: \"kubernetes.io/projected/a89433a3-b8b0-4c71-ad8e-32dd617dc69e-kube-api-access-x8dwl\") pod \"ovn-operator-controller-manager-5f95c46c78-7vw66\" (UID: \"a89433a3-b8b0-4c71-ad8e-32dd617dc69e\") " pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714284 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jrfh\" (UniqueName: \"kubernetes.io/projected/7731544d-11b0-44ef-8a11-163cd6e9cb53-kube-api-access-7jrfh\") pod \"openstack-baremetal-operator-controller-manager-6d776955-g6nvc\" (UID: \"7731544d-11b0-44ef-8a11-163cd6e9cb53\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714336 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p84wj\" (UniqueName: \"kubernetes.io/projected/37f9c445-f9e3-47a6-9cc5-63133c13e09f-kube-api-access-p84wj\") pod \"placement-operator-controller-manager-774b97b48-pzd6k\" (UID: \"37f9c445-f9e3-47a6-9cc5-63133c13e09f\") " pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714361 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77pgv\" (UniqueName: \"kubernetes.io/projected/2bf3c066-8608-4d04-8c2a-7570b23edebe-kube-api-access-77pgv\") pod \"mariadb-operator-controller-manager-687b9cf756-d7n87\" (UID: \"2bf3c066-8608-4d04-8c2a-7570b23edebe\") " pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714397 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmzfw\" (UniqueName: \"kubernetes.io/projected/902219f3-1427-4cf0-9b3f-5879caf3e30d-kube-api-access-rmzfw\") pod \"nova-operator-controller-manager-c7c776c96-b86j5\" (UID: \"902219f3-1427-4cf0-9b3f-5879caf3e30d\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714447 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66jxb\" (UniqueName: \"kubernetes.io/projected/fd02f073-8084-4117-b444-292a0e41e629-kube-api-access-66jxb\") pod \"octavia-operator-controller-manager-76fcc6dc7c-6rzg7\" (UID: \"fd02f073-8084-4117-b444-292a0e41e629\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" Sep 29 
09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714478 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7731544d-11b0-44ef-8a11-163cd6e9cb53-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-g6nvc\" (UID: \"7731544d-11b0-44ef-8a11-163cd6e9cb53\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.714507 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-522pw\" (UniqueName: \"kubernetes.io/projected/fa1b2d18-b1e3-410e-864f-84c2d892474a-kube-api-access-522pw\") pod \"neutron-operator-controller-manager-54d766c9f9-lbx77\" (UID: \"fa1b2d18-b1e3-410e-864f-84c2d892474a\") " pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" Sep 29 09:44:09 crc kubenswrapper[4779]: E0929 09:44:09.715400 4779 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 09:44:09 crc kubenswrapper[4779]: E0929 09:44:09.715450 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7731544d-11b0-44ef-8a11-163cd6e9cb53-cert podName:7731544d-11b0-44ef-8a11-163cd6e9cb53 nodeName:}" failed. No retries permitted until 2025-09-29 09:44:10.215435656 +0000 UTC m=+882.196759560 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7731544d-11b0-44ef-8a11-163cd6e9cb53-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-g6nvc" (UID: "7731544d-11b0-44ef-8a11-163cd6e9cb53") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.743643 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p84wj\" (UniqueName: \"kubernetes.io/projected/37f9c445-f9e3-47a6-9cc5-63133c13e09f-kube-api-access-p84wj\") pod \"placement-operator-controller-manager-774b97b48-pzd6k\" (UID: \"37f9c445-f9e3-47a6-9cc5-63133c13e09f\") " pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.743752 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jrfh\" (UniqueName: \"kubernetes.io/projected/7731544d-11b0-44ef-8a11-163cd6e9cb53-kube-api-access-7jrfh\") pod \"openstack-baremetal-operator-controller-manager-6d776955-g6nvc\" (UID: \"7731544d-11b0-44ef-8a11-163cd6e9cb53\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.751281 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-522pw\" (UniqueName: \"kubernetes.io/projected/fa1b2d18-b1e3-410e-864f-84c2d892474a-kube-api-access-522pw\") pod \"neutron-operator-controller-manager-54d766c9f9-lbx77\" (UID: \"fa1b2d18-b1e3-410e-864f-84c2d892474a\") " pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.759861 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.765765 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmzfw\" (UniqueName: \"kubernetes.io/projected/902219f3-1427-4cf0-9b3f-5879caf3e30d-kube-api-access-rmzfw\") pod \"nova-operator-controller-manager-c7c776c96-b86j5\" (UID: \"902219f3-1427-4cf0-9b3f-5879caf3e30d\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.765784 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77pgv\" (UniqueName: \"kubernetes.io/projected/2bf3c066-8608-4d04-8c2a-7570b23edebe-kube-api-access-77pgv\") pod \"mariadb-operator-controller-manager-687b9cf756-d7n87\" (UID: \"2bf3c066-8608-4d04-8c2a-7570b23edebe\") " pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.766060 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66jxb\" (UniqueName: \"kubernetes.io/projected/fd02f073-8084-4117-b444-292a0e41e629-kube-api-access-66jxb\") pod \"octavia-operator-controller-manager-76fcc6dc7c-6rzg7\" (UID: \"fd02f073-8084-4117-b444-292a0e41e629\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.766116 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qjpj\" (UniqueName: \"kubernetes.io/projected/2520e0fa-6109-4297-b41b-ff1de862f6a1-kube-api-access-4qjpj\") pod \"swift-operator-controller-manager-bc7dc7bd9-2zb2m\" (UID: \"2520e0fa-6109-4297-b41b-ff1de862f6a1\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.766243 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8dwl\" (UniqueName: \"kubernetes.io/projected/a89433a3-b8b0-4c71-ad8e-32dd617dc69e-kube-api-access-x8dwl\") pod \"ovn-operator-controller-manager-5f95c46c78-7vw66\" (UID: \"a89433a3-b8b0-4c71-ad8e-32dd617dc69e\") " pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.769213 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.791716 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.792177 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.810866 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.816264 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j87st\" (UniqueName: \"kubernetes.io/projected/7817ca80-efc5-49a0-ba11-daffb918491e-kube-api-access-j87st\") pod \"telemetry-operator-controller-manager-5bf96cfbc4-8gzc8\" (UID: \"7817ca80-efc5-49a0-ba11-daffb918491e\") " pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.837605 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-28w58"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.839139 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.852229 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-28w58"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.859121 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-5xbmw"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.860397 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.881977 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.883288 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.885127 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.886608 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-t8sjk"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.898947 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.909781 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.917715 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j87st\" (UniqueName: \"kubernetes.io/projected/7817ca80-efc5-49a0-ba11-daffb918491e-kube-api-access-j87st\") pod \"telemetry-operator-controller-manager-5bf96cfbc4-8gzc8\" (UID: \"7817ca80-efc5-49a0-ba11-daffb918491e\") " pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.940062 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.942852 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.945608 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-hqmj7"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.946097 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.947165 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.954635 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.970481 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j87st\" (UniqueName: \"kubernetes.io/projected/7817ca80-efc5-49a0-ba11-daffb918491e-kube-api-access-j87st\") pod \"telemetry-operator-controller-manager-5bf96cfbc4-8gzc8\" (UID: \"7817ca80-efc5-49a0-ba11-daffb918491e\") " pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.985401 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h"]
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.986340 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.987198 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.991172 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-mrxcn"
Sep 29 09:44:09 crc kubenswrapper[4779]: I0929 09:44:09.991661 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.020093 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzlng\" (UniqueName: \"kubernetes.io/projected/b4e41036-b598-4c28-b813-3fec8f90be39-kube-api-access-pzlng\") pod \"test-operator-controller-manager-f66b554c6-28w58\" (UID: \"b4e41036-b598-4c28-b813-3fec8f90be39\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.020180 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svnqn\" (UniqueName: \"kubernetes.io/projected/84dd49ae-5773-4135-886d-9f3c5a7c7b4b-kube-api-access-svnqn\") pod \"watcher-operator-controller-manager-6f4f448b94-qhpm7\" (UID: \"84dd49ae-5773-4135-886d-9f3c5a7c7b4b\") " pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.020884 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.069242 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.105504 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.116447 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.116495 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.122799 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzlng\" (UniqueName: \"kubernetes.io/projected/b4e41036-b598-4c28-b813-3fec8f90be39-kube-api-access-pzlng\") pod \"test-operator-controller-manager-f66b554c6-28w58\" (UID: \"b4e41036-b598-4c28-b813-3fec8f90be39\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.122867 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bspmm\" (UniqueName: \"kubernetes.io/projected/533fb7be-d888-4af5-8533-4dd61056500c-kube-api-access-bspmm\") pod \"openstack-operator-controller-manager-84d66d6d97-m2nkr\" (UID: \"533fb7be-d888-4af5-8533-4dd61056500c\") " pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.122893 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert\") pod \"infra-operator-controller-manager-858cd69f49-7pldk\" (UID: \"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.122940 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsc45\" (UniqueName: \"kubernetes.io/projected/90b2d814-d613-4ee6-bbce-23aad07f8d1c-kube-api-access-qsc45\") pod \"rabbitmq-cluster-operator-manager-79d8469568-kgz2h\" (UID: \"90b2d814-d613-4ee6-bbce-23aad07f8d1c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.122958 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert\") pod \"openstack-operator-controller-manager-84d66d6d97-m2nkr\" (UID: \"533fb7be-d888-4af5-8533-4dd61056500c\") " pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.122981 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svnqn\" (UniqueName: \"kubernetes.io/projected/84dd49ae-5773-4135-886d-9f3c5a7c7b4b-kube-api-access-svnqn\") pod \"watcher-operator-controller-manager-6f4f448b94-qhpm7\" (UID: \"84dd49ae-5773-4135-886d-9f3c5a7c7b4b\") " pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7"
Sep 29 09:44:10 crc kubenswrapper[4779]: E0929 09:44:10.123424 4779 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Sep 29 09:44:10 crc kubenswrapper[4779]: E0929 09:44:10.123462 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert podName:a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c nodeName:}" failed. No retries permitted until 2025-09-29 09:44:11.123448838 +0000 UTC m=+883.104772742 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert") pod "infra-operator-controller-manager-858cd69f49-7pldk" (UID: "a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c") : secret "infra-operator-webhook-server-cert" not found
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.141169 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.154743 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svnqn\" (UniqueName: \"kubernetes.io/projected/84dd49ae-5773-4135-886d-9f3c5a7c7b4b-kube-api-access-svnqn\") pod \"watcher-operator-controller-manager-6f4f448b94-qhpm7\" (UID: \"84dd49ae-5773-4135-886d-9f3c5a7c7b4b\") " pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.154740 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzlng\" (UniqueName: \"kubernetes.io/projected/b4e41036-b598-4c28-b813-3fec8f90be39-kube-api-access-pzlng\") pod \"test-operator-controller-manager-f66b554c6-28w58\" (UID: \"b4e41036-b598-4c28-b813-3fec8f90be39\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.182392 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.208184 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.223939 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7731544d-11b0-44ef-8a11-163cd6e9cb53-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-g6nvc\" (UID: \"7731544d-11b0-44ef-8a11-163cd6e9cb53\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.223992 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bspmm\" (UniqueName: \"kubernetes.io/projected/533fb7be-d888-4af5-8533-4dd61056500c-kube-api-access-bspmm\") pod \"openstack-operator-controller-manager-84d66d6d97-m2nkr\" (UID: \"533fb7be-d888-4af5-8533-4dd61056500c\") " pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.224027 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsc45\" (UniqueName: \"kubernetes.io/projected/90b2d814-d613-4ee6-bbce-23aad07f8d1c-kube-api-access-qsc45\") pod \"rabbitmq-cluster-operator-manager-79d8469568-kgz2h\" (UID: \"90b2d814-d613-4ee6-bbce-23aad07f8d1c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.224045 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert\") pod \"openstack-operator-controller-manager-84d66d6d97-m2nkr\" (UID: \"533fb7be-d888-4af5-8533-4dd61056500c\") " pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"
Sep 29 09:44:10 crc kubenswrapper[4779]: E0929 09:44:10.224168 4779 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Sep 29 09:44:10 crc kubenswrapper[4779]: E0929 09:44:10.224214 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert podName:533fb7be-d888-4af5-8533-4dd61056500c nodeName:}" failed. No retries permitted until 2025-09-29 09:44:10.72419978 +0000 UTC m=+882.705523684 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert") pod "openstack-operator-controller-manager-84d66d6d97-m2nkr" (UID: "533fb7be-d888-4af5-8533-4dd61056500c") : secret "webhook-server-cert" not found
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.235856 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7731544d-11b0-44ef-8a11-163cd6e9cb53-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-g6nvc\" (UID: \"7731544d-11b0-44ef-8a11-163cd6e9cb53\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.258069 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.258992 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bspmm\" (UniqueName: \"kubernetes.io/projected/533fb7be-d888-4af5-8533-4dd61056500c-kube-api-access-bspmm\") pod \"openstack-operator-controller-manager-84d66d6d97-m2nkr\" (UID: \"533fb7be-d888-4af5-8533-4dd61056500c\") " pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.259421 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsc45\" (UniqueName: \"kubernetes.io/projected/90b2d814-d613-4ee6-bbce-23aad07f8d1c-kube-api-access-qsc45\") pod \"rabbitmq-cluster-operator-manager-79d8469568-kgz2h\" (UID: \"90b2d814-d613-4ee6-bbce-23aad07f8d1c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.314163 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.428545 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.529285 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc"
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.706536 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.712683 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.754751 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert\") pod \"openstack-operator-controller-manager-84d66d6d97-m2nkr\" (UID: \"533fb7be-d888-4af5-8533-4dd61056500c\") " pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"
Sep 29 09:44:10 crc kubenswrapper[4779]: E0929 09:44:10.755030 4779 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Sep 29 09:44:10 crc kubenswrapper[4779]: E0929 09:44:10.755083 4779 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert podName:533fb7be-d888-4af5-8533-4dd61056500c nodeName:}" failed. No retries permitted until 2025-09-29 09:44:11.755067072 +0000 UTC m=+883.736390976 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert") pod "openstack-operator-controller-manager-84d66d6d97-m2nkr" (UID: "533fb7be-d888-4af5-8533-4dd61056500c") : secret "webhook-server-cert" not found
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.780019 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.806055 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.822857 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.855036 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87"]
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.898886 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" event={"ID":"fa1b2d18-b1e3-410e-864f-84c2d892474a","Type":"ContainerStarted","Data":"63744472284d9f6dd4a90d33ca98933bb6412de8349503d7b9ce12c8918b9d52"}
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.903436 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh" event={"ID":"d2ec9063-4ca4-4280-b98c-198da389f005","Type":"ContainerStarted","Data":"507163a54d2e7315b1fa49b98511d3344c82a8a4fa45792270e56bc4043a8aeb"}
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.905185 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq" event={"ID":"02af413e-f4cb-413e-b029-5410ad2bb9ab","Type":"ContainerStarted","Data":"ce690cb642f6ffa6acf3002a26a55ba38c917687b4968ad88bb1bb1a5a0e5389"}
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.907207 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87" event={"ID":"2bf3c066-8608-4d04-8c2a-7570b23edebe","Type":"ContainerStarted","Data":"f5a3c9eb65077dbf05c0fad86583a0f28056f17f34b6696edb90e0cb1ea9b429"}
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.910417 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr" event={"ID":"01b3e1fb-fbfd-48c6-a89f-cc347df3a24b","Type":"ContainerStarted","Data":"07e47d4f8b5415b7486daefb3eb87eeb0c1595a139b3ddd8ee7da519fa997b89"}
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.911868 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj" event={"ID":"618315d7-82b7-469d-ba71-a3fbb71ae08c","Type":"ContainerStarted","Data":"ddb42b4b46748a1dc3d90a0d2fc2000123dabc95c7b12a9a141eeb5458b63b04"}
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.923346 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9" event={"ID":"a5133455-fda2-4b98-9465-8421aae72e9c","Type":"ContainerStarted","Data":"eb62c7edd226dd51ab04abf923291fdb3dc561c5fa8050eb6dc80c284b024b76"}
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.931368 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc" event={"ID":"ad0344fd-e85f-41bc-88da-d38d5ce5add8","Type":"ContainerStarted","Data":"8b756664ae9c744622e3257b9519d44a83e71d6c9baf248c43b04a0334b2c5b6"}
Sep 29 09:44:10 crc kubenswrapper[4779]: I0929 09:44:10.932610 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp" event={"ID":"3ebfd5a9-ed4f-4589-900e-0c44346fece4","Type":"ContainerStarted","Data":"f24c81320d8c5cfe8d1b220e49345bf101724a67f570f7a23d9c1173be4edcf7"}
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.128997 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m"]
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.144588 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66"]
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.148790 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8"]
Sep 29 09:44:11 crc kubenswrapper[4779]: W0929 09:44:11.157027 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2520e0fa_6109_4297_b41b_ff1de862f6a1.slice/crio-94023add9c70b5347383578ede36bb3be161ed4f3665e64ba8743b05e0ce0ba6 WatchSource:0}: Error finding container 94023add9c70b5347383578ede36bb3be161ed4f3665e64ba8743b05e0ce0ba6: Status 404 returned error can't find the container with id 94023add9c70b5347383578ede36bb3be161ed4f3665e64ba8743b05e0ce0ba6
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.162103 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert\") pod \"infra-operator-controller-manager-858cd69f49-7pldk\" (UID: \"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.167324 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7"]
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.173831 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c-cert\") pod \"infra-operator-controller-manager-858cd69f49-7pldk\" (UID: \"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"
Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.185997 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:ae6fda8cafd6c3ab5d5e9c599d15b02ace61b8eacbac4de3df50427dfab6a0c0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j87st,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5bf96cfbc4-8gzc8_openstack-operators(7817ca80-efc5-49a0-ba11-daffb918491e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.186663 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k"]
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.191359 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-28w58"]
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.195963 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5"]
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.201670 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7"]
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.206051 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h"]
Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.208690 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pzlng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-28w58_openstack-operators(b4e41036-b598-4c28-b813-3fec8f90be39): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.209235 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.210094 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rmzfw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-c7c776c96-b86j5_openstack-operators(902219f3-1427-4cf0-9b3f-5879caf3e30d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 09:44:11 crc kubenswrapper[4779]: W0929 09:44:11.211028 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd02f073_8084_4117_b444_292a0e41e629.slice/crio-be5a50078ad34f41d18c27851afb6ac648b1840c70f1ca8498df5d8d03fa18b3 WatchSource:0}: Error finding container be5a50078ad34f41d18c27851afb6ac648b1840c70f1ca8498df5d8d03fa18b3: Status 404 returned error can't find the container with id be5a50078ad34f41d18c27851afb6ac648b1840c70f1ca8498df5d8d03fa18b3 Sep 29 09:44:11 crc kubenswrapper[4779]: W0929 09:44:11.211514 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7731544d_11b0_44ef_8a11_163cd6e9cb53.slice/crio-b4a83062a3f2c91f71bbc31c35e511dadfdeac7f8128baa9a35187431b62e2ef WatchSource:0}: Error finding container b4a83062a3f2c91f71bbc31c35e511dadfdeac7f8128baa9a35187431b62e2ef: Status 404 
returned error can't find the container with id b4a83062a3f2c91f71bbc31c35e511dadfdeac7f8128baa9a35187431b62e2ef Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.211759 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc"] Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.218669 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-66jxb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-6rzg7_openstack-operators(fd02f073-8084-4117-b444-292a0e41e629): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.243331 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Val
ue:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.
io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/o
penstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_LIGHTSPEED_IMAGE_URL_DEFAULT,Value:quay.io/openstack-lightspeed/rag-content:os-docs-2024.2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m 
DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7jrfh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-6d776955-g6nvc_openstack-operators(7731544d-11b0-44ef-8a11-163cd6e9cb53): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.243554 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qsc45,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-kgz2h_openstack-operators(90b2d814-d613-4ee6-bbce-23aad07f8d1c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.244883 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h" podUID="90b2d814-d613-4ee6-bbce-23aad07f8d1c" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.555567 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" podUID="902219f3-1427-4cf0-9b3f-5879caf3e30d" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.558288 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" podUID="b4e41036-b598-4c28-b813-3fec8f90be39" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.565946 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" podUID="7731544d-11b0-44ef-8a11-163cd6e9cb53" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.569183 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" podUID="7817ca80-efc5-49a0-ba11-daffb918491e" Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.578627 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" podUID="fd02f073-8084-4117-b444-292a0e41e629" Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.729145 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk"] Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.772132 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert\") pod 
\"openstack-operator-controller-manager-84d66d6d97-m2nkr\" (UID: \"533fb7be-d888-4af5-8533-4dd61056500c\") " pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.779126 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/533fb7be-d888-4af5-8533-4dd61056500c-cert\") pod \"openstack-operator-controller-manager-84d66d6d97-m2nkr\" (UID: \"533fb7be-d888-4af5-8533-4dd61056500c\") " pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.946454 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" event={"ID":"fd02f073-8084-4117-b444-292a0e41e629","Type":"ContainerStarted","Data":"835459aa8e01e1cf1323b6abb64b1a6e4dd335e580f4318ae109b8e485931b5e"} Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.946491 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" event={"ID":"fd02f073-8084-4117-b444-292a0e41e629","Type":"ContainerStarted","Data":"be5a50078ad34f41d18c27851afb6ac648b1840c70f1ca8498df5d8d03fa18b3"} Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.951050 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" podUID="fd02f073-8084-4117-b444-292a0e41e629" Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.953994 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" event={"ID":"7817ca80-efc5-49a0-ba11-daffb918491e","Type":"ContainerStarted","Data":"578af46fe64841bb48f276b49107883b27e7c8eaae3c373c2774eaa9f08746f5"} Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.954026 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" event={"ID":"7817ca80-efc5-49a0-ba11-daffb918491e","Type":"ContainerStarted","Data":"60f3236d893957a038be3513aad68e0e87092623cfec4ba0f8e7d7a9e8964158"} Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.954867 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:ae6fda8cafd6c3ab5d5e9c599d15b02ace61b8eacbac4de3df50427dfab6a0c0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" podUID="7817ca80-efc5-49a0-ba11-daffb918491e" Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.961273 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h" event={"ID":"90b2d814-d613-4ee6-bbce-23aad07f8d1c","Type":"ContainerStarted","Data":"f16150751fd2d5a21b40f3eb46a77ba1122bc6ec039b31c6b9236a2a154d0e0e"} Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.969439 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h" podUID="90b2d814-d613-4ee6-bbce-23aad07f8d1c" Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.981209 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" event={"ID":"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c","Type":"ContainerStarted","Data":"ee41e717ae193fd54d617279dde995154826bf19ecfe414a61eba0c5be153f7f"} Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.986965 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7" event={"ID":"84dd49ae-5773-4135-886d-9f3c5a7c7b4b","Type":"ContainerStarted","Data":"7ddcd2ee3195326573c24c2cee9c4206e32e1d137d982633aaeba6c69572fd87"} Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.990384 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" event={"ID":"7731544d-11b0-44ef-8a11-163cd6e9cb53","Type":"ContainerStarted","Data":"a6575d36db6b34eb38f1c54dead0712f44d6c763cf1e19f8e02faa367d500b33"} Sep 29 09:44:11 crc kubenswrapper[4779]: I0929 09:44:11.990429 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" event={"ID":"7731544d-11b0-44ef-8a11-163cd6e9cb53","Type":"ContainerStarted","Data":"b4a83062a3f2c91f71bbc31c35e511dadfdeac7f8128baa9a35187431b62e2ef"} Sep 29 09:44:11 crc kubenswrapper[4779]: E0929 09:44:11.991675 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" podUID="7731544d-11b0-44ef-8a11-163cd6e9cb53" Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.003062 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" event={"ID":"902219f3-1427-4cf0-9b3f-5879caf3e30d","Type":"ContainerStarted","Data":"4286ffb679861b1620bc8072b966640396c37993ec0539dfd8195ab7f0cf4f8c"} Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.003107 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" event={"ID":"902219f3-1427-4cf0-9b3f-5879caf3e30d","Type":"ContainerStarted","Data":"3ff93ce5ecfc81c554c2d5e6f52fdd8f7b372c77210da6114f6411a66ed4961f"} Sep 29 09:44:12 crc kubenswrapper[4779]: E0929 09:44:12.004270 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" podUID="902219f3-1427-4cf0-9b3f-5879caf3e30d" Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.004515 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m" 
event={"ID":"2520e0fa-6109-4297-b41b-ff1de862f6a1","Type":"ContainerStarted","Data":"94023add9c70b5347383578ede36bb3be161ed4f3665e64ba8743b05e0ce0ba6"} Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.005429 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx" event={"ID":"c9015e54-4a8c-4d07-ae64-74c380a50a22","Type":"ContainerStarted","Data":"1dc0714c37e023430242e6424c4020e0acaa64c77c5bbdbbe1675c462230f993"} Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.007777 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j" event={"ID":"31462d03-e504-4493-af67-3a5bc9eee5f7","Type":"ContainerStarted","Data":"bc5c18ff87057bc9fb142d039013cf92c5eaec813fc041f349e41137eba44d28"} Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.010445 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66" event={"ID":"a89433a3-b8b0-4c71-ad8e-32dd617dc69e","Type":"ContainerStarted","Data":"561fdf165fdb380d38339f48bdbf922a686aca0b7fce92543a2000c1a9c294a2"} Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.014095 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" event={"ID":"b4e41036-b598-4c28-b813-3fec8f90be39","Type":"ContainerStarted","Data":"9cfe11e49f18d98bcc76781a7371153dc3072f1c83c7ad0d67f126e9ade06d50"} Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.014130 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" event={"ID":"b4e41036-b598-4c28-b813-3fec8f90be39","Type":"ContainerStarted","Data":"ebd3365704e745cd3a5fe41cef267b00fbaa5d68d9c4118ed79315d59077c6ab"} Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.016401 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k" event={"ID":"37f9c445-f9e3-47a6-9cc5-63133c13e09f","Type":"ContainerStarted","Data":"759e075316fb8d77e4e4b5f08020cec7d488b1e2a02d13529a13915ce2b6d6c3"} Sep 29 09:44:12 crc kubenswrapper[4779]: E0929 09:44:12.020134 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" podUID="b4e41036-b598-4c28-b813-3fec8f90be39" Sep 29 09:44:12 crc kubenswrapper[4779]: I0929 09:44:12.067980 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" Sep 29 09:44:13 crc kubenswrapper[4779]: E0929 09:44:13.025579 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" podUID="b4e41036-b598-4c28-b813-3fec8f90be39" Sep 29 09:44:13 crc kubenswrapper[4779]: E0929 09:44:13.025579 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" podUID="fd02f073-8084-4117-b444-292a0e41e629" Sep 29 09:44:13 crc kubenswrapper[4779]: E0929 09:44:13.025625 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" podUID="7731544d-11b0-44ef-8a11-163cd6e9cb53" Sep 29 09:44:13 crc kubenswrapper[4779]: E0929 09:44:13.025683 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" podUID="902219f3-1427-4cf0-9b3f-5879caf3e30d" Sep 29 09:44:13 crc kubenswrapper[4779]: E0929 09:44:13.027258 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:ae6fda8cafd6c3ab5d5e9c599d15b02ace61b8eacbac4de3df50427dfab6a0c0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" podUID="7817ca80-efc5-49a0-ba11-daffb918491e" Sep 29 09:44:13 crc kubenswrapper[4779]: E0929 09:44:13.028891 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h" podUID="90b2d814-d613-4ee6-bbce-23aad07f8d1c" Sep 29 09:44:21 crc kubenswrapper[4779]: I0929 09:44:21.991294 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr"] Sep 29 09:44:22 crc kubenswrapper[4779]: I0929 09:44:22.103104 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr" event={"ID":"01b3e1fb-fbfd-48c6-a89f-cc347df3a24b","Type":"ContainerStarted","Data":"b05f60fef64524651d7cb6595f9a4bc6896c2bc43ba3a3e2475654826d9eb332"} Sep 29 09:44:22 crc 
kubenswrapper[4779]: I0929 09:44:22.104127 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" event={"ID":"533fb7be-d888-4af5-8533-4dd61056500c","Type":"ContainerStarted","Data":"790ebb37c37cf244267881c2a99166f0f0b37561a23d90db3bf9ef6702af28b4"} Sep 29 09:44:22 crc kubenswrapper[4779]: I0929 09:44:22.113584 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp" event={"ID":"3ebfd5a9-ed4f-4589-900e-0c44346fece4","Type":"ContainerStarted","Data":"40ed78fb856304a80df026b27e52596da93c824291a38784c4d6464ded09a748"} Sep 29 09:44:22 crc kubenswrapper[4779]: I0929 09:44:22.116382 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq" event={"ID":"02af413e-f4cb-413e-b029-5410ad2bb9ab","Type":"ContainerStarted","Data":"8a8d6e7c03dee029c508781f549cba383687e8f06922f41ee883c2eb511c0322"} Sep 29 09:44:22 crc kubenswrapper[4779]: I0929 09:44:22.120453 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc" event={"ID":"ad0344fd-e85f-41bc-88da-d38d5ce5add8","Type":"ContainerStarted","Data":"8e52519c4b354a3959e9630203c5ec35b33f3f4ff52abd721be9e7c93fbcf548"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.167002 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp" event={"ID":"3ebfd5a9-ed4f-4589-900e-0c44346fece4","Type":"ContainerStarted","Data":"dbc100a03995a9358a66fc9f2aa88944b9e1dd74fe4b5edbe1aac0494134fa98"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.167329 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.205532 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" event={"ID":"fa1b2d18-b1e3-410e-864f-84c2d892474a","Type":"ContainerStarted","Data":"6959f84dd9172e1e58201c1aa8e6c19a6beafbc5e9e21e771f43374afc9c838e"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.210665 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m" event={"ID":"2520e0fa-6109-4297-b41b-ff1de862f6a1","Type":"ContainerStarted","Data":"ccb68b0cf5af463eb953630c4718179b48db5491248696963c000f65c3f22416"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.211110 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp" podStartSLOduration=2.816792547 podStartE2EDuration="14.211093486s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.157718506 +0000 UTC m=+882.139042410" lastFinishedPulling="2025-09-29 09:44:21.552019445 +0000 UTC m=+893.533343349" observedRunningTime="2025-09-29 09:44:23.20579706 +0000 UTC m=+895.187120964" watchObservedRunningTime="2025-09-29 09:44:23.211093486 +0000 UTC m=+895.192417390" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.211852 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j" 
event={"ID":"31462d03-e504-4493-af67-3a5bc9eee5f7","Type":"ContainerStarted","Data":"c74ac27d2c4c29794128db8c28fce69eb71feff6f27514d066c7cb5f1a242594"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.223727 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh" event={"ID":"d2ec9063-4ca4-4280-b98c-198da389f005","Type":"ContainerStarted","Data":"a9c3dc5be9c44d83a8c665969e3d60780205e7a4af35b320aa9cf2c344bff24f"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.224542 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.245295 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k" event={"ID":"37f9c445-f9e3-47a6-9cc5-63133c13e09f","Type":"ContainerStarted","Data":"7ad907b0ad526fdf023b9bdfe34b26a2cfae4d942652e9890312b09ceff95be2"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.264344 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7" event={"ID":"84dd49ae-5773-4135-886d-9f3c5a7c7b4b","Type":"ContainerStarted","Data":"415633597c417d582c07f019e9d2d7d2c044aec9972ed0d02c7bba900319eb80"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.294429 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" event={"ID":"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c","Type":"ContainerStarted","Data":"2c177eb088592cd0efed27cca34df0a3f3d9bf864140ace24b54b3c3c9ca7f2e"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.295120 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.322187 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx" event={"ID":"c9015e54-4a8c-4d07-ae64-74c380a50a22","Type":"ContainerStarted","Data":"9fb882daa75dc48c0e7bc4cbc1df1a6672c86868b5f284bb20f4993b9582de68"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.323470 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj" event={"ID":"618315d7-82b7-469d-ba71-a3fbb71ae08c","Type":"ContainerStarted","Data":"efb7914482e73aa63175c45ad0ed3fb08dbc47a174a03488cc56637722b93531"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.324411 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66" event={"ID":"a89433a3-b8b0-4c71-ad8e-32dd617dc69e","Type":"ContainerStarted","Data":"07d04d560e35e4f2365e03d2dd18d005a5fbc8eecac446c942f78009e7a91442"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.341116 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh" podStartSLOduration=3.397283248 podStartE2EDuration="14.341093007s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.777061059 +0000 UTC m=+882.758384963" lastFinishedPulling="2025-09-29 09:44:21.720870828 +0000 UTC m=+893.702194722" observedRunningTime="2025-09-29 09:44:23.313501036 
+0000 UTC m=+895.294824940" watchObservedRunningTime="2025-09-29 09:44:23.341093007 +0000 UTC m=+895.322416911" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.342164 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9" event={"ID":"a5133455-fda2-4b98-9465-8421aae72e9c","Type":"ContainerStarted","Data":"dd0360a1fd583a49cb98bd2e579589b06650139a8ecfc9c9008e7455ffd1f0cb"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.342845 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.384084 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq" event={"ID":"02af413e-f4cb-413e-b029-5410ad2bb9ab","Type":"ContainerStarted","Data":"58c53bcce49c52f438db733ad39dbe9a887da3344ab9f5484a6bc458743df505"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.384779 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.423200 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" event={"ID":"533fb7be-d888-4af5-8533-4dd61056500c","Type":"ContainerStarted","Data":"7fb3f92fbac6f70b46868e3cd73e4f9d12b27bc223310cb93b208e59dd04cfef"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.424721 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" podStartSLOduration=4.465815523 podStartE2EDuration="14.424700204s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.76481144 +0000 UTC m=+883.746135344" lastFinishedPulling="2025-09-29 09:44:21.723696111 +0000 UTC m=+893.705020025" observedRunningTime="2025-09-29 09:44:23.345097195 +0000 UTC m=+895.326421099" watchObservedRunningTime="2025-09-29 09:44:23.424700204 +0000 UTC m=+895.406024108" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.453038 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc" event={"ID":"ad0344fd-e85f-41bc-88da-d38d5ce5add8","Type":"ContainerStarted","Data":"e2e142a433b5df078c13d1027dbd39b8e955fa35e6a6dacac3c0762bbf58700d"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.454124 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.468467 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87" event={"ID":"2bf3c066-8608-4d04-8c2a-7570b23edebe","Type":"ContainerStarted","Data":"99a1af650fbff2377ee1900786697f7e8e827c11e146930690d48edb68b8c654"} Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.474853 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq" podStartSLOduration=2.899673743 podStartE2EDuration="14.474838218s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.140955603 +0000 UTC 
m=+882.122279507" lastFinishedPulling="2025-09-29 09:44:21.716120078 +0000 UTC m=+893.697443982" observedRunningTime="2025-09-29 09:44:23.452872682 +0000 UTC m=+895.434196586" watchObservedRunningTime="2025-09-29 09:44:23.474838218 +0000 UTC m=+895.456162122" Sep 29 09:44:23 crc kubenswrapper[4779]: I0929 09:44:23.478417 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9" podStartSLOduration=3.01286506 podStartE2EDuration="14.478400503s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.19564322 +0000 UTC m=+882.176967124" lastFinishedPulling="2025-09-29 09:44:21.661178663 +0000 UTC m=+893.642502567" observedRunningTime="2025-09-29 09:44:23.423266492 +0000 UTC m=+895.404590396" watchObservedRunningTime="2025-09-29 09:44:23.478400503 +0000 UTC m=+895.459724407" Sep 29 09:44:24 crc kubenswrapper[4779]: I0929 09:44:24.478577 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh" event={"ID":"d2ec9063-4ca4-4280-b98c-198da389f005","Type":"ContainerStarted","Data":"48398a9efb13f058790fb6dc02c4e1112364aa637418007422931e2012eceeaa"} Sep 29 09:44:24 crc kubenswrapper[4779]: I0929 09:44:24.480497 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9" event={"ID":"a5133455-fda2-4b98-9465-8421aae72e9c","Type":"ContainerStarted","Data":"b57120882dfd99a19bc713e129d08f6cf9f71834c4bd9f710fd7d4a600cbc13a"} Sep 29 09:44:24 crc kubenswrapper[4779]: I0929 09:44:24.482194 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" event={"ID":"a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c","Type":"ContainerStarted","Data":"1a61502d19470a0794087fe22a8339fd51f317dfcd33a0637bdf539cf097c1fe"} Sep 29 09:44:24 crc kubenswrapper[4779]: I0929 09:44:24.483770 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m" event={"ID":"2520e0fa-6109-4297-b41b-ff1de862f6a1","Type":"ContainerStarted","Data":"2e88a131bb434a78bf0bd0f3cbcc75c4e1a284ab48344694bb15dff24b00ca5f"} Sep 29 09:44:27 crc kubenswrapper[4779]: I0929 09:44:27.506246 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87" event={"ID":"2bf3c066-8608-4d04-8c2a-7570b23edebe","Type":"ContainerStarted","Data":"e0af1d3acc6904e7e596bc12f291e63798932b05dba3cc5b6758bb12388e2c5a"} Sep 29 09:44:27 crc kubenswrapper[4779]: I0929 09:44:27.508731 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj" event={"ID":"618315d7-82b7-469d-ba71-a3fbb71ae08c","Type":"ContainerStarted","Data":"c1869e076d1bcd07012f5dc7036c0981731badc5a97598b179733174ee2b1655"} Sep 29 09:44:27 crc kubenswrapper[4779]: I0929 09:44:27.510683 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66" event={"ID":"a89433a3-b8b0-4c71-ad8e-32dd617dc69e","Type":"ContainerStarted","Data":"6939aa91ba39deb53290f9fd3a73d2735661ae444bf462f117189959d31aca2b"} Sep 29 09:44:27 crc kubenswrapper[4779]: I0929 09:44:27.511052 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m" Sep 
29 09:44:27 crc kubenswrapper[4779]: I0929 09:44:27.513781 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m" Sep 29 09:44:27 crc kubenswrapper[4779]: I0929 09:44:27.536326 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-2zb2m" podStartSLOduration=8.049169242 podStartE2EDuration="18.536306279s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.174066797 +0000 UTC m=+883.155390701" lastFinishedPulling="2025-09-29 09:44:21.661203824 +0000 UTC m=+893.642527738" observedRunningTime="2025-09-29 09:44:27.532090065 +0000 UTC m=+899.513413979" watchObservedRunningTime="2025-09-29 09:44:27.536306279 +0000 UTC m=+899.517630193" Sep 29 09:44:27 crc kubenswrapper[4779]: I0929 09:44:27.537611 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc" podStartSLOduration=7.704546893 podStartE2EDuration="18.537600067s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.784323372 +0000 UTC m=+882.765647286" lastFinishedPulling="2025-09-29 09:44:21.617376556 +0000 UTC m=+893.598700460" observedRunningTime="2025-09-29 09:44:23.499132832 +0000 UTC m=+895.480456736" watchObservedRunningTime="2025-09-29 09:44:27.537600067 +0000 UTC m=+899.518923981" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.518436 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" event={"ID":"fa1b2d18-b1e3-410e-864f-84c2d892474a","Type":"ContainerStarted","Data":"b96e2b207b472c8bfd6ff4d322977d1b2e848376732cb49d7e102fb17a6f5deb"} Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.519348 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.521618 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx" event={"ID":"c9015e54-4a8c-4d07-ae64-74c380a50a22","Type":"ContainerStarted","Data":"040a72a62d849c7fac9c2e08936c206ddb87964a965856dbdd63c9ddf012c651"} Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.522432 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.522478 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.533493 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.545213 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-lbx77" podStartSLOduration=8.643499232 podStartE2EDuration="19.545194602s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.816111597 +0000 UTC m=+882.797435501" lastFinishedPulling="2025-09-29 09:44:21.717806967 +0000 UTC 
m=+893.699130871" observedRunningTime="2025-09-29 09:44:28.543393939 +0000 UTC m=+900.524717853" watchObservedRunningTime="2025-09-29 09:44:28.545194602 +0000 UTC m=+900.526518506" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.546085 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j" event={"ID":"31462d03-e504-4493-af67-3a5bc9eee5f7","Type":"ContainerStarted","Data":"8ad1fce39194d39384c04d462f36172ff5ea397cb2716a106b3a10a45b7e6ca5"} Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.546369 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.548219 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.548486 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr" event={"ID":"01b3e1fb-fbfd-48c6-a89f-cc347df3a24b","Type":"ContainerStarted","Data":"2e62db5eea928f146a3351848da6ec74afcb9adf3e9148bea715618f23f49b0f"} Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.549209 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.550601 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.550736 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" event={"ID":"533fb7be-d888-4af5-8533-4dd61056500c","Type":"ContainerStarted","Data":"0e22b12e5b0ee48a1a6ed44aeced39cbcfc108e6b91832a9278006228f1ad3d3"} Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.554245 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k" event={"ID":"37f9c445-f9e3-47a6-9cc5-63133c13e09f","Type":"ContainerStarted","Data":"922c96488473b005ef64e80105d57e57acb8f48dfc226479ba59318a516ed83d"} Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.556302 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.557641 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.562500 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7" event={"ID":"84dd49ae-5773-4135-886d-9f3c5a7c7b4b","Type":"ContainerStarted","Data":"fa787b54a519baa66c449f3686745285b0725293e9d3e3f9704e20ccfa502156"} Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.562549 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.562561 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.563302 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.566589 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.570761 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.570990 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.598500 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-gnhwx" podStartSLOduration=8.764861598 podStartE2EDuration="19.598482288s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.887469184 +0000 UTC m=+882.868793088" lastFinishedPulling="2025-09-29 09:44:21.721089874 +0000 UTC m=+893.702413778" observedRunningTime="2025-09-29 09:44:28.581242101 +0000 UTC m=+900.562566015" watchObservedRunningTime="2025-09-29 09:44:28.598482288 +0000 UTC m=+900.579806192" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.610978 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66" podStartSLOduration=9.073630423000001 podStartE2EDuration="19.610961015s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.18402645 +0000 UTC m=+883.165350354" lastFinishedPulling="2025-09-29 09:44:21.721357042 +0000 UTC m=+893.702680946" observedRunningTime="2025-09-29 09:44:28.602705762 +0000 UTC m=+900.584029666" watchObservedRunningTime="2025-09-29 09:44:28.610961015 +0000 UTC m=+900.592284919" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.681949 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-d7n87" podStartSLOduration=8.788565304 podStartE2EDuration="19.68192789s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.821991389 +0000 UTC m=+882.803315303" lastFinishedPulling="2025-09-29 09:44:21.715353985 +0000 UTC m=+893.696677889" observedRunningTime="2025-09-29 09:44:28.628360326 +0000 UTC m=+900.609684230" watchObservedRunningTime="2025-09-29 09:44:28.68192789 +0000 UTC m=+900.663251794" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.684576 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-jdvhr" podStartSLOduration=8.326076792 podStartE2EDuration="19.684556378s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.361871256 +0000 UTC m=+882.343195160" lastFinishedPulling="2025-09-29 09:44:21.720350842 +0000 UTC m=+893.701674746" observedRunningTime="2025-09-29 09:44:28.682308731 +0000 UTC m=+900.663632645" watchObservedRunningTime="2025-09-29 09:44:28.684556378 +0000 UTC 
m=+900.665880282" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.710447 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-774b97b48-pzd6k" podStartSLOduration=9.17151446 podStartE2EDuration="19.710424578s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.185290837 +0000 UTC m=+883.166614741" lastFinishedPulling="2025-09-29 09:44:21.724200955 +0000 UTC m=+893.705524859" observedRunningTime="2025-09-29 09:44:28.703285568 +0000 UTC m=+900.684609482" watchObservedRunningTime="2025-09-29 09:44:28.710424578 +0000 UTC m=+900.691748482" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.739180 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" podStartSLOduration=19.739155502 podStartE2EDuration="19.739155502s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:44:28.734679001 +0000 UTC m=+900.716002905" watchObservedRunningTime="2025-09-29 09:44:28.739155502 +0000 UTC m=+900.720479406" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.752297 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6f4f448b94-qhpm7" podStartSLOduration=9.206448707 podStartE2EDuration="19.752282978s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.184285648 +0000 UTC m=+883.165609552" lastFinishedPulling="2025-09-29 09:44:21.730119909 +0000 UTC m=+893.711443823" observedRunningTime="2025-09-29 09:44:28.750897777 +0000 UTC m=+900.732221691" watchObservedRunningTime="2025-09-29 09:44:28.752282978 +0000 UTC m=+900.733606882" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.775937 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-8ff95898-z2ltj" podStartSLOduration=8.524868744 podStartE2EDuration="19.775920653s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.471155568 +0000 UTC m=+882.452479472" lastFinishedPulling="2025-09-29 09:44:21.722207467 +0000 UTC m=+893.703531381" observedRunningTime="2025-09-29 09:44:28.770001489 +0000 UTC m=+900.751325403" watchObservedRunningTime="2025-09-29 09:44:28.775920653 +0000 UTC m=+900.757244557" Sep 29 09:44:28 crc kubenswrapper[4779]: I0929 09:44:28.796133 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-xxc4j" podStartSLOduration=8.96672707 podStartE2EDuration="19.796111586s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:10.887113913 +0000 UTC m=+882.868437817" lastFinishedPulling="2025-09-29 09:44:21.716498429 +0000 UTC m=+893.697822333" observedRunningTime="2025-09-29 09:44:28.786893835 +0000 UTC m=+900.768217739" watchObservedRunningTime="2025-09-29 09:44:28.796111586 +0000 UTC m=+900.777435490" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.454751 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-dwvhq" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.474012 4779 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-cssz9" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.529663 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-6r9tp" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.567292 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.572144 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-84d66d6d97-m2nkr" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.771819 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-8fwmh" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.805366 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-ll5vc" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.947979 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66" Sep 29 09:44:29 crc kubenswrapper[4779]: I0929 09:44:29.949941 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-7vw66" Sep 29 09:44:31 crc kubenswrapper[4779]: I0929 09:44:31.216506 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7pldk" Sep 29 09:44:34 crc kubenswrapper[4779]: I0929 09:44:34.622148 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" event={"ID":"7731544d-11b0-44ef-8a11-163cd6e9cb53","Type":"ContainerStarted","Data":"d7b5fa8e9b22f59577d6c6e7521318ee57212b3a101d842a7d771f8875fe2f89"} Sep 29 09:44:34 crc kubenswrapper[4779]: I0929 09:44:34.622807 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" Sep 29 09:44:34 crc kubenswrapper[4779]: I0929 09:44:34.623974 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" event={"ID":"fd02f073-8084-4117-b444-292a0e41e629","Type":"ContainerStarted","Data":"eaf7225bf36199931adda8d77173becc45b3165d9d85d4e16351a8a5d19b5349"} Sep 29 09:44:34 crc kubenswrapper[4779]: I0929 09:44:34.624140 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" Sep 29 09:44:34 crc kubenswrapper[4779]: I0929 09:44:34.652403 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" podStartSLOduration=2.570797638 podStartE2EDuration="25.652389198s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.242855789 +0000 UTC m=+883.224179683" lastFinishedPulling="2025-09-29 09:44:34.324447349 +0000 UTC m=+906.305771243" observedRunningTime="2025-09-29 09:44:34.64974163 +0000 UTC m=+906.631065554" 
watchObservedRunningTime="2025-09-29 09:44:34.652389198 +0000 UTC m=+906.633713102" Sep 29 09:44:34 crc kubenswrapper[4779]: I0929 09:44:34.684351 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" podStartSLOduration=2.578385551 podStartE2EDuration="25.684330377s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.218535564 +0000 UTC m=+883.199859478" lastFinishedPulling="2025-09-29 09:44:34.32448041 +0000 UTC m=+906.305804304" observedRunningTime="2025-09-29 09:44:34.679124494 +0000 UTC m=+906.660448418" watchObservedRunningTime="2025-09-29 09:44:34.684330377 +0000 UTC m=+906.665654281" Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.631429 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" event={"ID":"902219f3-1427-4cf0-9b3f-5879caf3e30d","Type":"ContainerStarted","Data":"70b522f2823d03fa693921300db83e44c961f6cbe2082953f48e1ab6860c7506"} Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.632360 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.633181 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" event={"ID":"7817ca80-efc5-49a0-ba11-daffb918491e","Type":"ContainerStarted","Data":"921ab04893d5bce55b6bfb0f74d6dcdee7573e7f7272d428b7c45df7ce4c834b"} Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.633383 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.634719 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h" event={"ID":"90b2d814-d613-4ee6-bbce-23aad07f8d1c","Type":"ContainerStarted","Data":"b982de6bbb219d5b7c1dc4582a4d7e78281b074cfb363470c29dc71e63b12419"} Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.636473 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" event={"ID":"b4e41036-b598-4c28-b813-3fec8f90be39","Type":"ContainerStarted","Data":"7554accea3640686a5ecc074608f20615819f20dd719ed913249da26ed1dda01"} Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.648995 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" podStartSLOduration=3.500248016 podStartE2EDuration="26.648978499s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.209125748 +0000 UTC m=+883.190449652" lastFinishedPulling="2025-09-29 09:44:34.357856231 +0000 UTC m=+906.339180135" observedRunningTime="2025-09-29 09:44:35.647413173 +0000 UTC m=+907.628737087" watchObservedRunningTime="2025-09-29 09:44:35.648978499 +0000 UTC m=+907.630302403" Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.669643 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" podStartSLOduration=3.530166855 podStartE2EDuration="26.669626756s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" 
firstStartedPulling="2025-09-29 09:44:11.185768271 +0000 UTC m=+883.167092175" lastFinishedPulling="2025-09-29 09:44:34.325228172 +0000 UTC m=+906.306552076" observedRunningTime="2025-09-29 09:44:35.668154612 +0000 UTC m=+907.649478526" watchObservedRunningTime="2025-09-29 09:44:35.669626756 +0000 UTC m=+907.650950660" Sep 29 09:44:35 crc kubenswrapper[4779]: I0929 09:44:35.683602 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-kgz2h" podStartSLOduration=3.43371969 podStartE2EDuration="26.683586126s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.243466027 +0000 UTC m=+883.224789931" lastFinishedPulling="2025-09-29 09:44:34.493332463 +0000 UTC m=+906.474656367" observedRunningTime="2025-09-29 09:44:35.679372302 +0000 UTC m=+907.660696206" watchObservedRunningTime="2025-09-29 09:44:35.683586126 +0000 UTC m=+907.664910030" Sep 29 09:44:39 crc kubenswrapper[4779]: I0929 09:44:39.888019 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-b86j5" Sep 29 09:44:39 crc kubenswrapper[4779]: I0929 09:44:39.907775 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" podStartSLOduration=7.79185391 podStartE2EDuration="30.907748949s" podCreationTimestamp="2025-09-29 09:44:09 +0000 UTC" firstStartedPulling="2025-09-29 09:44:11.208572591 +0000 UTC m=+883.189896495" lastFinishedPulling="2025-09-29 09:44:34.32446763 +0000 UTC m=+906.305791534" observedRunningTime="2025-09-29 09:44:35.696885807 +0000 UTC m=+907.678209711" watchObservedRunningTime="2025-09-29 09:44:39.907748949 +0000 UTC m=+911.889072853" Sep 29 09:44:39 crc kubenswrapper[4779]: I0929 09:44:39.913772 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-6rzg7" Sep 29 09:44:40 crc kubenswrapper[4779]: I0929 09:44:40.072146 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-8gzc8" Sep 29 09:44:40 crc kubenswrapper[4779]: I0929 09:44:40.184702 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" Sep 29 09:44:40 crc kubenswrapper[4779]: I0929 09:44:40.186752 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-28w58" Sep 29 09:44:40 crc kubenswrapper[4779]: I0929 09:44:40.534029 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-g6nvc" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.322049 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7687d5c5c-92rcn"] Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.324050 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.326472 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.326537 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-9fm97" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.329138 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.329179 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.344216 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7687d5c5c-92rcn"] Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.366794 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx8b8\" (UniqueName: \"kubernetes.io/projected/39c36338-59ac-4873-9218-f99df307efc3-kube-api-access-sx8b8\") pod \"dnsmasq-dns-7687d5c5c-92rcn\" (UID: \"39c36338-59ac-4873-9218-f99df307efc3\") " pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.366836 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c36338-59ac-4873-9218-f99df307efc3-config\") pod \"dnsmasq-dns-7687d5c5c-92rcn\" (UID: \"39c36338-59ac-4873-9218-f99df307efc3\") " pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.404574 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fb658dc65-qf22m"] Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.432629 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.435753 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.448372 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fb658dc65-qf22m"] Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.468562 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-config\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.468631 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsmzl\" (UniqueName: \"kubernetes.io/projected/2edf572a-5bab-41c3-8ff8-a668fb8a5893-kube-api-access-dsmzl\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.468784 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx8b8\" (UniqueName: \"kubernetes.io/projected/39c36338-59ac-4873-9218-f99df307efc3-kube-api-access-sx8b8\") pod \"dnsmasq-dns-7687d5c5c-92rcn\" (UID: \"39c36338-59ac-4873-9218-f99df307efc3\") " pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.468818 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c36338-59ac-4873-9218-f99df307efc3-config\") pod \"dnsmasq-dns-7687d5c5c-92rcn\" (UID: \"39c36338-59ac-4873-9218-f99df307efc3\") " pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.468881 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-dns-svc\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.470383 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c36338-59ac-4873-9218-f99df307efc3-config\") pod \"dnsmasq-dns-7687d5c5c-92rcn\" (UID: \"39c36338-59ac-4873-9218-f99df307efc3\") " pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.489346 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx8b8\" (UniqueName: \"kubernetes.io/projected/39c36338-59ac-4873-9218-f99df307efc3-kube-api-access-sx8b8\") pod \"dnsmasq-dns-7687d5c5c-92rcn\" (UID: \"39c36338-59ac-4873-9218-f99df307efc3\") " pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.570155 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-dns-svc\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 
09:44:59.570251 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-config\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.570285 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsmzl\" (UniqueName: \"kubernetes.io/projected/2edf572a-5bab-41c3-8ff8-a668fb8a5893-kube-api-access-dsmzl\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.571571 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-config\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.571749 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-dns-svc\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.594933 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsmzl\" (UniqueName: \"kubernetes.io/projected/2edf572a-5bab-41c3-8ff8-a668fb8a5893-kube-api-access-dsmzl\") pod \"dnsmasq-dns-5fb658dc65-qf22m\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.656610 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:44:59 crc kubenswrapper[4779]: I0929 09:44:59.759546 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.139424 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7687d5c5c-92rcn"] Sep 29 09:45:00 crc kubenswrapper[4779]: W0929 09:45:00.151631 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39c36338_59ac_4873_9218_f99df307efc3.slice/crio-4bd272758eb1173f9c071e4307da3b8ae38d9e47a4ceea5913b5c63ae0936435 WatchSource:0}: Error finding container 4bd272758eb1173f9c071e4307da3b8ae38d9e47a4ceea5913b5c63ae0936435: Status 404 returned error can't find the container with id 4bd272758eb1173f9c071e4307da3b8ae38d9e47a4ceea5913b5c63ae0936435 Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.155036 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg"] Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.156007 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.166521 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.167063 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.171974 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg"] Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.181258 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d40756b7-84bf-4295-a020-07fbd8ffa388-secret-volume\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.181304 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d40756b7-84bf-4295-a020-07fbd8ffa388-config-volume\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.181429 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh6b9\" (UniqueName: \"kubernetes.io/projected/d40756b7-84bf-4295-a020-07fbd8ffa388-kube-api-access-kh6b9\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.236851 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fb658dc65-qf22m"] Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.282406 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d40756b7-84bf-4295-a020-07fbd8ffa388-secret-volume\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.282464 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d40756b7-84bf-4295-a020-07fbd8ffa388-config-volume\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.282546 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh6b9\" (UniqueName: \"kubernetes.io/projected/d40756b7-84bf-4295-a020-07fbd8ffa388-kube-api-access-kh6b9\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.283801 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d40756b7-84bf-4295-a020-07fbd8ffa388-config-volume\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.287337 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d40756b7-84bf-4295-a020-07fbd8ffa388-secret-volume\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.306647 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh6b9\" (UniqueName: \"kubernetes.io/projected/d40756b7-84bf-4295-a020-07fbd8ffa388-kube-api-access-kh6b9\") pod \"collect-profiles-29318985-gbvrg\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.482610 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.841875 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" event={"ID":"39c36338-59ac-4873-9218-f99df307efc3","Type":"ContainerStarted","Data":"4bd272758eb1173f9c071e4307da3b8ae38d9e47a4ceea5913b5c63ae0936435"} Sep 29 09:45:00 crc kubenswrapper[4779]: I0929 09:45:00.844116 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" event={"ID":"2edf572a-5bab-41c3-8ff8-a668fb8a5893","Type":"ContainerStarted","Data":"bb1f2904293b5d2c5782d8f9fceff9cd7810c1992c8f3595a4df74bd1f3ebc67"} Sep 29 09:45:01 crc kubenswrapper[4779]: I0929 09:45:01.001867 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg"] Sep 29 09:45:01 crc kubenswrapper[4779]: W0929 09:45:01.019673 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd40756b7_84bf_4295_a020_07fbd8ffa388.slice/crio-0fbe2883e024b8dfba0f0d16ceaa618ea3ab909074f43ac84ee726f7fe89e5c7 WatchSource:0}: Error finding container 0fbe2883e024b8dfba0f0d16ceaa618ea3ab909074f43ac84ee726f7fe89e5c7: Status 404 returned error can't find the container with id 0fbe2883e024b8dfba0f0d16ceaa618ea3ab909074f43ac84ee726f7fe89e5c7 Sep 29 09:45:01 crc kubenswrapper[4779]: I0929 09:45:01.861942 4779 generic.go:334] "Generic (PLEG): container finished" podID="d40756b7-84bf-4295-a020-07fbd8ffa388" containerID="5b2fdc0671308293ca34f8786edd66bead5898c3c362ea89cd1528aa4121af3a" exitCode=0 Sep 29 09:45:01 crc kubenswrapper[4779]: I0929 09:45:01.862069 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" event={"ID":"d40756b7-84bf-4295-a020-07fbd8ffa388","Type":"ContainerDied","Data":"5b2fdc0671308293ca34f8786edd66bead5898c3c362ea89cd1528aa4121af3a"} Sep 29 09:45:01 crc kubenswrapper[4779]: I0929 09:45:01.862300 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" 
event={"ID":"d40756b7-84bf-4295-a020-07fbd8ffa388","Type":"ContainerStarted","Data":"0fbe2883e024b8dfba0f0d16ceaa618ea3ab909074f43ac84ee726f7fe89e5c7"} Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.225572 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.329613 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kh6b9\" (UniqueName: \"kubernetes.io/projected/d40756b7-84bf-4295-a020-07fbd8ffa388-kube-api-access-kh6b9\") pod \"d40756b7-84bf-4295-a020-07fbd8ffa388\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.329707 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d40756b7-84bf-4295-a020-07fbd8ffa388-config-volume\") pod \"d40756b7-84bf-4295-a020-07fbd8ffa388\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.329749 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d40756b7-84bf-4295-a020-07fbd8ffa388-secret-volume\") pod \"d40756b7-84bf-4295-a020-07fbd8ffa388\" (UID: \"d40756b7-84bf-4295-a020-07fbd8ffa388\") " Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.330541 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d40756b7-84bf-4295-a020-07fbd8ffa388-config-volume" (OuterVolumeSpecName: "config-volume") pod "d40756b7-84bf-4295-a020-07fbd8ffa388" (UID: "d40756b7-84bf-4295-a020-07fbd8ffa388"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.336679 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d40756b7-84bf-4295-a020-07fbd8ffa388-kube-api-access-kh6b9" (OuterVolumeSpecName: "kube-api-access-kh6b9") pod "d40756b7-84bf-4295-a020-07fbd8ffa388" (UID: "d40756b7-84bf-4295-a020-07fbd8ffa388"). InnerVolumeSpecName "kube-api-access-kh6b9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.340562 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d40756b7-84bf-4295-a020-07fbd8ffa388-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d40756b7-84bf-4295-a020-07fbd8ffa388" (UID: "d40756b7-84bf-4295-a020-07fbd8ffa388"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.433214 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d40756b7-84bf-4295-a020-07fbd8ffa388-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.433524 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kh6b9\" (UniqueName: \"kubernetes.io/projected/d40756b7-84bf-4295-a020-07fbd8ffa388-kube-api-access-kh6b9\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.433615 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d40756b7-84bf-4295-a020-07fbd8ffa388-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.537039 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fb658dc65-qf22m"] Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.580412 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b8b7cfc7-dnc8g"] Sep 29 09:45:03 crc kubenswrapper[4779]: E0929 09:45:03.580715 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d40756b7-84bf-4295-a020-07fbd8ffa388" containerName="collect-profiles" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.580729 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d40756b7-84bf-4295-a020-07fbd8ffa388" containerName="collect-profiles" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.580889 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d40756b7-84bf-4295-a020-07fbd8ffa388" containerName="collect-profiles" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.581646 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.598680 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b8b7cfc7-dnc8g"] Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.636739 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-config\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.636792 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-dns-svc\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.636855 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmcpr\" (UniqueName: \"kubernetes.io/projected/53d6662d-fb9b-4a71-84e7-13716b4a18b1-kube-api-access-bmcpr\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.737762 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmcpr\" (UniqueName: \"kubernetes.io/projected/53d6662d-fb9b-4a71-84e7-13716b4a18b1-kube-api-access-bmcpr\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.737884 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-config\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.737928 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-dns-svc\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.738754 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-dns-svc\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.739452 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-config\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.756051 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmcpr\" (UniqueName: 
\"kubernetes.io/projected/53d6662d-fb9b-4a71-84e7-13716b4a18b1-kube-api-access-bmcpr\") pod \"dnsmasq-dns-5b8b7cfc7-dnc8g\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.873646 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7687d5c5c-92rcn"] Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.888260 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" event={"ID":"d40756b7-84bf-4295-a020-07fbd8ffa388","Type":"ContainerDied","Data":"0fbe2883e024b8dfba0f0d16ceaa618ea3ab909074f43ac84ee726f7fe89e5c7"} Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.888311 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fbe2883e024b8dfba0f0d16ceaa618ea3ab909074f43ac84ee726f7fe89e5c7" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.888316 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.894982 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b7756dccc-dk8xp"] Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.896239 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.898342 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.917683 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b7756dccc-dk8xp"] Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.942147 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-dns-svc\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.942191 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzfhv\" (UniqueName: \"kubernetes.io/projected/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-kube-api-access-nzfhv\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:03 crc kubenswrapper[4779]: I0929 09:45:03.942350 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-config\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.044720 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-config\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.044797 4779 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-dns-svc\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.044825 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzfhv\" (UniqueName: \"kubernetes.io/projected/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-kube-api-access-nzfhv\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.045763 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-dns-svc\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.045768 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-config\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.063926 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzfhv\" (UniqueName: \"kubernetes.io/projected/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-kube-api-access-nzfhv\") pod \"dnsmasq-dns-b7756dccc-dk8xp\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.180781 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b8b7cfc7-dnc8g"] Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.196895 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9d9d94d6f-fkg46"] Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.198021 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.213061 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.213701 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9d9d94d6f-fkg46"] Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.249915 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-config\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.249985 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-dns-svc\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.250005 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnc2r\" (UniqueName: \"kubernetes.io/projected/71e1e56c-3b30-4233-a2df-3efe864df26a-kube-api-access-nnc2r\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.350802 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-config\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.350855 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-dns-svc\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.350880 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnc2r\" (UniqueName: \"kubernetes.io/projected/71e1e56c-3b30-4233-a2df-3efe864df26a-kube-api-access-nnc2r\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.351878 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-config\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.353448 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-dns-svc\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.388501 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnc2r\" (UniqueName: 
\"kubernetes.io/projected/71e1e56c-3b30-4233-a2df-3efe864df26a-kube-api-access-nnc2r\") pod \"dnsmasq-dns-9d9d94d6f-fkg46\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.518416 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.756983 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.758688 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.760940 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.761019 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.761103 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.761578 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.762550 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.762555 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-qglqq" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.762956 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.774966 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.859774 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.859847 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8faade2a-9a07-45b9-99e4-b448b64afaaa-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.859885 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.859929 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: 
\"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.859953 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.859977 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrmmh\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-kube-api-access-nrmmh\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.859999 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-config-data\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.860035 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.860079 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8faade2a-9a07-45b9-99e4-b448b64afaaa-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.860099 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.860138 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.961003 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.961328 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8faade2a-9a07-45b9-99e4-b448b64afaaa-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.961352 
4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.961371 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.961386 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.961402 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrmmh\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-kube-api-access-nrmmh\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.962241 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.961421 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-config-data\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.962328 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.962430 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-config-data\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.962708 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.962743 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8faade2a-9a07-45b9-99e4-b448b64afaaa-pod-info\") pod \"rabbitmq-server-0\" (UID: 
\"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.962785 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.962818 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.962824 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.963088 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.963958 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.969916 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.976589 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.981527 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8faade2a-9a07-45b9-99e4-b448b64afaaa-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:04 crc kubenswrapper[4779]: I0929 09:45:04.983420 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8faade2a-9a07-45b9-99e4-b448b64afaaa-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.001807 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrmmh\" (UniqueName: 
\"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-kube-api-access-nrmmh\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.003124 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " pod="openstack/rabbitmq-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.043420 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.044674 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.047074 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.049786 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.050623 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-6fz62" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.050734 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.050805 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.050830 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.050830 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.050930 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.089759 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168320 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168396 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168413 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168507 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168574 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168661 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/779f829e-6240-47a5-8d8d-9e279d316df7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168692 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/779f829e-6240-47a5-8d8d-9e279d316df7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168768 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168832 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.168857 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz796\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-kube-api-access-vz796\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270254 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270300 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz796\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-kube-api-access-vz796\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270323 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270344 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270366 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270381 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270395 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0" 
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270419 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270456 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/779f829e-6240-47a5-8d8d-9e279d316df7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270474 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/779f829e-6240-47a5-8d8d-9e279d316df7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.270508 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.271795 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.271816 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.272642 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.273172 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.273382 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.274702 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.276435 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/779f829e-6240-47a5-8d8d-9e279d316df7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.277019 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.280279 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/779f829e-6240-47a5-8d8d-9e279d316df7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.280635 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.287618 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz796\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-kube-api-access-vz796\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.292501 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.332358 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.334321 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.337890 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-server-dockercfg-gpdmv"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.338024 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-notifications-svc"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.339504 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-plugins-conf"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.339890 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-config-data"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.340164 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-default-user"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.340390 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-server-conf"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.340667 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-erlang-cookie"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.349425 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.380370 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.473729 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/daa6f578-ea44-4555-be0c-e2b8662386f0-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.473794 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.473819 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.473869 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.473955 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.474022 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/daa6f578-ea44-4555-be0c-e2b8662386f0-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.474049 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffqq7\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-kube-api-access-ffqq7\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.474104 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.474148 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.474211 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.474234 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.574974 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575013 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575044 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/daa6f578-ea44-4555-be0c-e2b8662386f0-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575067 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffqq7\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-kube-api-access-ffqq7\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575093 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575126 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575148 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575169 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575200 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/daa6f578-ea44-4555-be0c-e2b8662386f0-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575220 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575236 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.575938 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.576173 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.576367 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.577575 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.578440 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.578541 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/daa6f578-ea44-4555-be0c-e2b8662386f0-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.580512 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/daa6f578-ea44-4555-be0c-e2b8662386f0-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.586733 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/daa6f578-ea44-4555-be0c-e2b8662386f0-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.589702 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.590565 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.594317 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffqq7\" (UniqueName: \"kubernetes.io/projected/daa6f578-ea44-4555-be0c-e2b8662386f0-kube-api-access-ffqq7\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.607475 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"daa6f578-ea44-4555-be0c-e2b8662386f0\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:05 crc kubenswrapper[4779]: I0929 09:45:05.659917 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.957680 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.959803 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.962857 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.963238 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-rtftm"
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.967325 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.967635 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.968124 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.971613 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Sep 29 09:45:07 crc kubenswrapper[4779]: I0929 09:45:07.979706 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.061358 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.062723 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.064663 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-7xxxn"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.064967 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.065173 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.069173 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.077390 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.117723 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.117781 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.117804 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.117826 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-config-data-default\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.117864 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.117924 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfqjs\" (UniqueName: \"kubernetes.io/projected/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-kube-api-access-kfqjs\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.117953 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-kolla-config\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.117976 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-secrets\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.118110 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.219559 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.219688 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.219726 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.219816 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.219929 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220032 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220136 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220210 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220445 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220591 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220659 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-config-data-default\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220687 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220728 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220781 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfqjs\" (UniqueName: \"kubernetes.io/projected/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-kube-api-access-kfqjs\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220828 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220874 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-kolla-config\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220924 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lnpp\" (UniqueName: \"kubernetes.io/projected/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-kube-api-access-7lnpp\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.220961 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.221002 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-secrets\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.221608 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-kolla-config\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.221775 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-config-data-default\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.222115 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.222524 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.229531 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-secrets\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.230338 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0"
Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.241009 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfqjs\" (UniqueName: \"kubernetes.io/projected/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-kube-api-access-kfqjs\") pod \"openstack-galera-0\" (UID: 
\"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.257296 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb5f88d2-6663-4ed6-a7a7-93ee500c9edf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.261849 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf\") " pod="openstack/openstack-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.287597 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323647 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323694 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323729 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323763 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323809 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323828 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323870 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: 
\"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323893 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lnpp\" (UniqueName: \"kubernetes.io/projected/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-kube-api-access-7lnpp\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.323925 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.324995 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.325661 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.326104 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.329626 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.330891 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.333362 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.333391 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.340636 
4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.349662 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lnpp\" (UniqueName: \"kubernetes.io/projected/00e4a196-0951-4c3a-9a1e-65e24cf2e6a0-kube-api-access-7lnpp\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.356449 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0\") " pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.388117 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.538372 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.539376 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.545538 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.546492 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-btvp8" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.546718 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.551615 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.628024 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-memcached-tls-certs\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.628083 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-combined-ca-bundle\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.628219 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj5z2\" (UniqueName: \"kubernetes.io/projected/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-kube-api-access-wj5z2\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.628239 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-config-data\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.628315 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-kolla-config\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.730797 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-combined-ca-bundle\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.730936 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj5z2\" (UniqueName: \"kubernetes.io/projected/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-kube-api-access-wj5z2\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.730962 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-config-data\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.731043 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-kolla-config\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.731098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-memcached-tls-certs\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.731883 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-kolla-config\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.732057 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-config-data\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.734201 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-combined-ca-bundle\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.736249 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-memcached-tls-certs\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.749025 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj5z2\" (UniqueName: \"kubernetes.io/projected/75fdeaf0-3dbf-4249-b03d-8b59289a2d58-kube-api-access-wj5z2\") pod \"memcached-0\" (UID: \"75fdeaf0-3dbf-4249-b03d-8b59289a2d58\") " pod="openstack/memcached-0" Sep 29 09:45:08 crc kubenswrapper[4779]: I0929 09:45:08.855144 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 09:45:10 crc kubenswrapper[4779]: I0929 09:45:10.334385 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 09:45:10 crc kubenswrapper[4779]: I0929 09:45:10.335710 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 09:45:10 crc kubenswrapper[4779]: I0929 09:45:10.339224 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-thzpt" Sep 29 09:45:10 crc kubenswrapper[4779]: I0929 09:45:10.348133 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 09:45:10 crc kubenswrapper[4779]: I0929 09:45:10.460713 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj2p7\" (UniqueName: \"kubernetes.io/projected/eb6a2725-2f24-4e1f-9791-d544f59eddeb-kube-api-access-nj2p7\") pod \"kube-state-metrics-0\" (UID: \"eb6a2725-2f24-4e1f-9791-d544f59eddeb\") " pod="openstack/kube-state-metrics-0" Sep 29 09:45:10 crc kubenswrapper[4779]: I0929 09:45:10.564775 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj2p7\" (UniqueName: \"kubernetes.io/projected/eb6a2725-2f24-4e1f-9791-d544f59eddeb-kube-api-access-nj2p7\") pod \"kube-state-metrics-0\" (UID: \"eb6a2725-2f24-4e1f-9791-d544f59eddeb\") " pod="openstack/kube-state-metrics-0" Sep 29 09:45:10 crc kubenswrapper[4779]: I0929 09:45:10.590991 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nj2p7\" (UniqueName: \"kubernetes.io/projected/eb6a2725-2f24-4e1f-9791-d544f59eddeb-kube-api-access-nj2p7\") pod \"kube-state-metrics-0\" (UID: \"eb6a2725-2f24-4e1f-9791-d544f59eddeb\") " pod="openstack/kube-state-metrics-0" Sep 29 09:45:10 crc kubenswrapper[4779]: I0929 09:45:10.668252 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.595314 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.597633 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.600450 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-zgl4h" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.603313 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.603708 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.603783 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.605634 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.606517 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.608393 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.681132 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3627e10-4513-49fd-bdf5-0a83db9d8561-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.681172 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.681218 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.681309 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.681342 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.681363 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnkhx\" 
(UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-kube-api-access-xnkhx\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.681385 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3627e10-4513-49fd-bdf5-0a83db9d8561-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.681407 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.782797 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3627e10-4513-49fd-bdf5-0a83db9d8561-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.782848 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.782885 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.782947 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.783003 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.783037 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnkhx\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-kube-api-access-xnkhx\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.783083 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3627e10-4513-49fd-bdf5-0a83db9d8561-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.783118 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.784047 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3627e10-4513-49fd-bdf5-0a83db9d8561-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.787340 4779 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.787389 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d7c28a7a22cfe960b99bbb5b934acd1f650db36f185879457a9343b648a1e5b0/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.788758 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.789145 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.796615 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3627e10-4513-49fd-bdf5-0a83db9d8561-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.799832 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: 
I0929 09:45:11.800645 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnkhx\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-kube-api-access-xnkhx\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.801723 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.816717 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:11 crc kubenswrapper[4779]: I0929 09:45:11.917898 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.565363 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-dzjsq"] Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.567012 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.571340 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-qmb7z" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.571664 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.571840 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.581582 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-bbznl"] Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.583709 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.593179 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-dzjsq"] Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.625348 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bbznl"] Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.713794 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2745c65-9c13-46fa-b3e6-37731ad17208-scripts\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.713832 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-log\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.713859 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-run-ovn\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.713876 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/290bdf85-850a-4b79-85f7-dc2e662e0ae9-ovn-controller-tls-certs\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.713954 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290bdf85-850a-4b79-85f7-dc2e662e0ae9-combined-ca-bundle\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.713988 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-etc-ovs\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.714016 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-lib\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.714099 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-run\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 
09:45:13.714150 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-run\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.714176 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-log-ovn\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.714204 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89c7v\" (UniqueName: \"kubernetes.io/projected/a2745c65-9c13-46fa-b3e6-37731ad17208-kube-api-access-89c7v\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.714240 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/290bdf85-850a-4b79-85f7-dc2e662e0ae9-scripts\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.714289 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5b2f\" (UniqueName: \"kubernetes.io/projected/290bdf85-850a-4b79-85f7-dc2e662e0ae9-kube-api-access-j5b2f\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815622 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-etc-ovs\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815666 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-lib\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815705 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-run\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815744 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-run\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815767 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-log-ovn\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815793 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89c7v\" (UniqueName: \"kubernetes.io/projected/a2745c65-9c13-46fa-b3e6-37731ad17208-kube-api-access-89c7v\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815811 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/290bdf85-850a-4b79-85f7-dc2e662e0ae9-scripts\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815828 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5b2f\" (UniqueName: \"kubernetes.io/projected/290bdf85-850a-4b79-85f7-dc2e662e0ae9-kube-api-access-j5b2f\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815850 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2745c65-9c13-46fa-b3e6-37731ad17208-scripts\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815868 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-log\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815885 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-run-ovn\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815916 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/290bdf85-850a-4b79-85f7-dc2e662e0ae9-ovn-controller-tls-certs\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.815941 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290bdf85-850a-4b79-85f7-dc2e662e0ae9-combined-ca-bundle\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.816261 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-etc-ovs\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " 
pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.816389 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-lib\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.816582 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-run\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.816649 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-run\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.816791 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-log-ovn\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.816897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a2745c65-9c13-46fa-b3e6-37731ad17208-var-log\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.818369 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/290bdf85-850a-4b79-85f7-dc2e662e0ae9-scripts\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.832347 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/290bdf85-850a-4b79-85f7-dc2e662e0ae9-var-run-ovn\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.833992 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2745c65-9c13-46fa-b3e6-37731ad17208-scripts\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.836170 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290bdf85-850a-4b79-85f7-dc2e662e0ae9-combined-ca-bundle\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.837829 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/290bdf85-850a-4b79-85f7-dc2e662e0ae9-ovn-controller-tls-certs\") pod 
\"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.839084 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89c7v\" (UniqueName: \"kubernetes.io/projected/a2745c65-9c13-46fa-b3e6-37731ad17208-kube-api-access-89c7v\") pod \"ovn-controller-ovs-bbznl\" (UID: \"a2745c65-9c13-46fa-b3e6-37731ad17208\") " pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.839478 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5b2f\" (UniqueName: \"kubernetes.io/projected/290bdf85-850a-4b79-85f7-dc2e662e0ae9-kube-api-access-j5b2f\") pod \"ovn-controller-dzjsq\" (UID: \"290bdf85-850a-4b79-85f7-dc2e662e0ae9\") " pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.893817 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:13 crc kubenswrapper[4779]: I0929 09:45:13.926325 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.458451 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.460336 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.462517 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.462970 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-f7cwr" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.463199 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.463754 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.464067 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.474460 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.531058 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65145612-cd78-4a1f-84c3-ea831e0c83b0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.531120 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65145612-cd78-4a1f-84c3-ea831e0c83b0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.531184 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbckw\" 
(UniqueName: \"kubernetes.io/projected/65145612-cd78-4a1f-84c3-ea831e0c83b0-kube-api-access-dbckw\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.531232 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65145612-cd78-4a1f-84c3-ea831e0c83b0-config\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.531289 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65145612-cd78-4a1f-84c3-ea831e0c83b0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.531316 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.531381 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65145612-cd78-4a1f-84c3-ea831e0c83b0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.531410 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65145612-cd78-4a1f-84c3-ea831e0c83b0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.632558 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65145612-cd78-4a1f-84c3-ea831e0c83b0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.632602 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.632660 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65145612-cd78-4a1f-84c3-ea831e0c83b0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.632685 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65145612-cd78-4a1f-84c3-ea831e0c83b0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc 
kubenswrapper[4779]: I0929 09:45:14.632718 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65145612-cd78-4a1f-84c3-ea831e0c83b0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0"
Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.632751 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65145612-cd78-4a1f-84c3-ea831e0c83b0-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0"
Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.632802 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbckw\" (UniqueName: \"kubernetes.io/projected/65145612-cd78-4a1f-84c3-ea831e0c83b0-kube-api-access-dbckw\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0"
Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.632821 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65145612-cd78-4a1f-84c3-ea831e0c83b0-config\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0"
Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.632984 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-nb-0"
Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.633519 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65145612-cd78-4a1f-84c3-ea831e0c83b0-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0"
Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.633671 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65145612-cd78-4a1f-84c3-ea831e0c83b0-config\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0"
Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.633744 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65145612-cd78-4a1f-84c3-ea831e0c83b0-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0"
Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.639447 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65145612-cd78-4a1f-84c3-ea831e0c83b0-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0"
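
The records above are the kubelet's standard three-phase volume pipeline for ovsdbserver-nb-0: the reconciler first runs VerifyControllerAttachedVolume for each declared volume, MountVolume.MountDevice then stages the device-backed local PV at /mnt/openstack/pv12, and MountVolume.SetUp finally materializes every volume (ConfigMaps, Secrets, the projected service-account token) under the pod's volumes directory. A minimal client-go sketch, not kubelet code and with an illustrative kubeconfig path, that lists the declared volumes these per-volume records correspond to:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption: the kubeconfig path is illustrative, not taken from this log.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	pod, err := cs.CoreV1().Pods("openstack").Get(context.TODO(), "ovsdbserver-nb-0", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	// Each name printed here matches one VerifyControllerAttachedVolume /
	// MountVolume.SetUp pair in the records above.
	for _, v := range pod.Spec.Volumes {
		fmt.Println(v.Name)
	}
}
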
\"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.641104 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65145612-cd78-4a1f-84c3-ea831e0c83b0-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.658181 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.660637 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbckw\" (UniqueName: \"kubernetes.io/projected/65145612-cd78-4a1f-84c3-ea831e0c83b0-kube-api-access-dbckw\") pod \"ovsdbserver-nb-0\" (UID: \"65145612-cd78-4a1f-84c3-ea831e0c83b0\") " pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:14 crc kubenswrapper[4779]: I0929 09:45:14.785505 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: E0929 09:45:17.029370 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.175:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Sep 29 09:45:17 crc kubenswrapper[4779]: E0929 09:45:17.029676 4779 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.175:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Sep 29 09:45:17 crc kubenswrapper[4779]: E0929 09:45:17.029786 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.175:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dsmzl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5fb658dc65-qf22m_openstack(2edf572a-5bab-41c3-8ff8-a668fb8a5893): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 09:45:17 crc kubenswrapper[4779]: E0929 09:45:17.031369 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" podUID="2edf572a-5bab-41c3-8ff8-a668fb8a5893" Sep 29 09:45:17 crc kubenswrapper[4779]: E0929 09:45:17.050563 4779 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.175:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Sep 29 09:45:17 crc kubenswrapper[4779]: E0929 09:45:17.050621 4779 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.175:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Sep 29 09:45:17 crc kubenswrapper[4779]: E0929 09:45:17.050958 4779 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.175:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sx8b8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7687d5c5c-92rcn_openstack(39c36338-59ac-4873-9218-f99df307efc3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 29 09:45:17 crc kubenswrapper[4779]: E0929 09:45:17.053078 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" podUID="39c36338-59ac-4873-9218-f99df307efc3"
Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.931145 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
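
The E0929 records above are a single failure surfacing at successive kubelet layers: the CRI image service returns gRPC code Canceled ("copying config: context canceled") for the neutron-server image pull, kuberuntime_image.go and kuberuntime_manager.go wrap and log it (including the full init-container spec), and pod_workers.go abandons this sync with "Error syncing pod, skipping"; the pull is retried with backoff on a later sync. A minimal sketch, using only the standard grpc-go status API (the helper name is hypothetical, not kubelet code), of how such an error can be classified as transient:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// retryablePullError reports whether an error from a CRI PullImage call
// looks transient. Canceled or DeadlineExceeded typically mean the
// caller's context expired mid-pull, as in the records above.
func retryablePullError(err error) bool {
	switch status.Code(err) {
	case codes.Canceled, codes.DeadlineExceeded, codes.Unavailable:
		return true
	default:
		return false
	}
}

func main() {
	err := status.Error(codes.Canceled, "copying config: context canceled")
	fmt.Println(retryablePullError(err)) // true
}
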
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.935137 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-fdn6s" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.935589 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.936037 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.936281 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.945193 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.998825 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d54n\" (UniqueName: \"kubernetes.io/projected/d00ff60f-2316-40a6-a874-c7f4e6506a48-kube-api-access-4d54n\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.998894 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.998944 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d00ff60f-2316-40a6-a874-c7f4e6506a48-config\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.999116 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.999145 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.999198 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d00ff60f-2316-40a6-a874-c7f4e6506a48-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.999221 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d00ff60f-2316-40a6-a874-c7f4e6506a48-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " 
pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:17 crc kubenswrapper[4779]: I0929 09:45:17.999434 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.078729 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9d9d94d6f-fkg46"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.080460 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b8b7cfc7-dnc8g"] Sep 29 09:45:18 crc kubenswrapper[4779]: W0929 09:45:18.084739 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53d6662d_fb9b_4a71_84e7_13716b4a18b1.slice/crio-3eed1c4cb980f68b2bdc375b10b2e87455afbc474a8409b8b5c9e99596922106 WatchSource:0}: Error finding container 3eed1c4cb980f68b2bdc375b10b2e87455afbc474a8409b8b5c9e99596922106: Status 404 returned error can't find the container with id 3eed1c4cb980f68b2bdc375b10b2e87455afbc474a8409b8b5c9e99596922106 Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.086628 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: W0929 09:45:18.091546 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00e4a196_0951_4c3a_9a1e_65e24cf2e6a0.slice/crio-acd76df4be3e4d0493554ea1a4e07465d66672abcf444ad1656952fb6b2e1796 WatchSource:0}: Error finding container acd76df4be3e4d0493554ea1a4e07465d66672abcf444ad1656952fb6b2e1796: Status 404 returned error can't find the container with id acd76df4be3e4d0493554ea1a4e07465d66672abcf444ad1656952fb6b2e1796 Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.092973 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: W0929 09:45:18.096104 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75fdeaf0_3dbf_4249_b03d_8b59289a2d58.slice/crio-0a42e86c25163336b7aa750e82710b2b5141abe525490ca885decbc38ec856bc WatchSource:0}: Error finding container 0a42e86c25163336b7aa750e82710b2b5141abe525490ca885decbc38ec856bc: Status 404 returned error can't find the container with id 0a42e86c25163336b7aa750e82710b2b5141abe525490ca885decbc38ec856bc Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.100960 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.101018 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d54n\" (UniqueName: \"kubernetes.io/projected/d00ff60f-2316-40a6-a874-c7f4e6506a48-kube-api-access-4d54n\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.101096 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.101131 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d00ff60f-2316-40a6-a874-c7f4e6506a48-config\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.101253 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.101282 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.101335 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d00ff60f-2316-40a6-a874-c7f4e6506a48-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.101360 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d00ff60f-2316-40a6-a874-c7f4e6506a48-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.102772 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d00ff60f-2316-40a6-a874-c7f4e6506a48-config\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.103995 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d00ff60f-2316-40a6-a874-c7f4e6506a48-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.105482 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.109154 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d00ff60f-2316-40a6-a874-c7f4e6506a48-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.109390 4779 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.110063 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.114062 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.114641 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d00ff60f-2316-40a6-a874-c7f4e6506a48-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.121960 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.131309 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d54n\" (UniqueName: \"kubernetes.io/projected/d00ff60f-2316-40a6-a874-c7f4e6506a48-kube-api-access-4d54n\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.140700 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.183149 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d00ff60f-2316-40a6-a874-c7f4e6506a48\") " pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.246872 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bbznl"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.259263 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.355701 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b7756dccc-dk8xp"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.360148 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.375971 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.383179 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-dzjsq"] Sep 29 09:45:18 crc kubenswrapper[4779]: W0929 09:45:18.440456 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb6a2725_2f24_4e1f_9791_d544f59eddeb.slice/crio-9459d52f100ff40f181d7bd7b0be17c8db292f239c2fe274eb5981d423b4d7ef WatchSource:0}: Error finding container 9459d52f100ff40f181d7bd7b0be17c8db292f239c2fe274eb5981d423b4d7ef: Status 404 returned error can't find the container with id 9459d52f100ff40f181d7bd7b0be17c8db292f239c2fe274eb5981d423b4d7ef Sep 29 09:45:18 crc kubenswrapper[4779]: W0929 09:45:18.469547 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod290bdf85_850a_4b79_85f7_dc2e662e0ae9.slice/crio-68b3d20aeae603d3f7f1dded7f1836f832aa1237360b0130e1ca2a3625f0a423 WatchSource:0}: Error finding container 68b3d20aeae603d3f7f1dded7f1836f832aa1237360b0130e1ca2a3625f0a423: Status 404 returned error can't find the container with id 68b3d20aeae603d3f7f1dded7f1836f832aa1237360b0130e1ca2a3625f0a423 Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.554976 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.600649 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.612024 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-dns-svc\") pod \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.612134 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsmzl\" (UniqueName: \"kubernetes.io/projected/2edf572a-5bab-41c3-8ff8-a668fb8a5893-kube-api-access-dsmzl\") pod \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.612283 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-config\") pod \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\" (UID: \"2edf572a-5bab-41c3-8ff8-a668fb8a5893\") " Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.615334 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-config" (OuterVolumeSpecName: "config") pod "2edf572a-5bab-41c3-8ff8-a668fb8a5893" (UID: "2edf572a-5bab-41c3-8ff8-a668fb8a5893"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.616131 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2edf572a-5bab-41c3-8ff8-a668fb8a5893" (UID: "2edf572a-5bab-41c3-8ff8-a668fb8a5893"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.619442 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.619468 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf572a-5bab-41c3-8ff8-a668fb8a5893-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.620292 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2edf572a-5bab-41c3-8ff8-a668fb8a5893-kube-api-access-dsmzl" (OuterVolumeSpecName: "kube-api-access-dsmzl") pod "2edf572a-5bab-41c3-8ff8-a668fb8a5893" (UID: "2edf572a-5bab-41c3-8ff8-a668fb8a5893"). InnerVolumeSpecName "kube-api-access-dsmzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.639505 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.720819 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c36338-59ac-4873-9218-f99df307efc3-config\") pod \"39c36338-59ac-4873-9218-f99df307efc3\" (UID: \"39c36338-59ac-4873-9218-f99df307efc3\") " Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.720863 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sx8b8\" (UniqueName: \"kubernetes.io/projected/39c36338-59ac-4873-9218-f99df307efc3-kube-api-access-sx8b8\") pod \"39c36338-59ac-4873-9218-f99df307efc3\" (UID: \"39c36338-59ac-4873-9218-f99df307efc3\") " Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.721190 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsmzl\" (UniqueName: \"kubernetes.io/projected/2edf572a-5bab-41c3-8ff8-a668fb8a5893-kube-api-access-dsmzl\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.721301 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39c36338-59ac-4873-9218-f99df307efc3-config" (OuterVolumeSpecName: "config") pod "39c36338-59ac-4873-9218-f99df307efc3" (UID: "39c36338-59ac-4873-9218-f99df307efc3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.725548 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39c36338-59ac-4873-9218-f99df307efc3-kube-api-access-sx8b8" (OuterVolumeSpecName: "kube-api-access-sx8b8") pod "39c36338-59ac-4873-9218-f99df307efc3" (UID: "39c36338-59ac-4873-9218-f99df307efc3"). InnerVolumeSpecName "kube-api-access-sx8b8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.749292 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.822779 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sx8b8\" (UniqueName: \"kubernetes.io/projected/39c36338-59ac-4873-9218-f99df307efc3-kube-api-access-sx8b8\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:18 crc kubenswrapper[4779]: I0929 09:45:18.822819 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39c36338-59ac-4873-9218-f99df307efc3-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:18 crc kubenswrapper[4779]: W0929 09:45:18.846035 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65145612_cd78_4a1f_84c3_ea831e0c83b0.slice/crio-c2d1e9cc609a5f9e5bc9cb2334e2ba4f008bdd2e7f8c98d84c13951dec0cb90a WatchSource:0}: Error finding container c2d1e9cc609a5f9e5bc9cb2334e2ba4f008bdd2e7f8c98d84c13951dec0cb90a: Status 404 returned error can't find the container with id c2d1e9cc609a5f9e5bc9cb2334e2ba4f008bdd2e7f8c98d84c13951dec0cb90a Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.012680 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.077685 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-l5mxb"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.078802 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.081584 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.084930 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerStarted","Data":"e6ca92459233eba526c312174b0fda7da03a5bd1f1923132652448d46ae5dc09"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.098284 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-l5mxb"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.099612 4779 generic.go:334] "Generic (PLEG): container finished" podID="71e1e56c-3b30-4233-a2df-3efe864df26a" containerID="d0ab6c95b00c1150411a69422d676510c3140992f02481cfca8d7e8d0a9ad74b" exitCode=0 Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.103586 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" event={"ID":"71e1e56c-3b30-4233-a2df-3efe864df26a","Type":"ContainerDied","Data":"d0ab6c95b00c1150411a69422d676510c3140992f02481cfca8d7e8d0a9ad74b"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.103647 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" event={"ID":"71e1e56c-3b30-4233-a2df-3efe864df26a","Type":"ContainerStarted","Data":"a73acc17bd175af7db31307971359b2be25e91f0db3e842761f4c546885a6895"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.127279 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6553a386-d57f-483f-98ed-99ba90b035c6-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.127347 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7q9c\" (UniqueName: \"kubernetes.io/projected/6553a386-d57f-483f-98ed-99ba90b035c6-kube-api-access-g7q9c\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.127523 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/6553a386-d57f-483f-98ed-99ba90b035c6-ovs-rundir\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.127876 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6553a386-d57f-483f-98ed-99ba90b035c6-config\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.128201 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/6553a386-d57f-483f-98ed-99ba90b035c6-ovn-rundir\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.128329 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6553a386-d57f-483f-98ed-99ba90b035c6-combined-ca-bundle\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.156787 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.156788 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb658dc65-qf22m" event={"ID":"2edf572a-5bab-41c3-8ff8-a668fb8a5893","Type":"ContainerDied","Data":"bb1f2904293b5d2c5782d8f9fceff9cd7810c1992c8f3595a4df74bd1f3ebc67"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.162089 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8faade2a-9a07-45b9-99e4-b448b64afaaa","Type":"ContainerStarted","Data":"621b630c1e35207125fe79ef7c88c8f1fc2945c4bbefe71ee5f3cac1578faa64"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.168763 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d00ff60f-2316-40a6-a874-c7f4e6506a48","Type":"ContainerStarted","Data":"9c8a84157f3bdbafb778018fda08664f2ad51ae3e040cd47b3f87de5168f97c4"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.176719 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"75fdeaf0-3dbf-4249-b03d-8b59289a2d58","Type":"ContainerStarted","Data":"0a42e86c25163336b7aa750e82710b2b5141abe525490ca885decbc38ec856bc"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.182478 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"eb6a2725-2f24-4e1f-9791-d544f59eddeb","Type":"ContainerStarted","Data":"9459d52f100ff40f181d7bd7b0be17c8db292f239c2fe274eb5981d423b4d7ef"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.193131 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0","Type":"ContainerStarted","Data":"acd76df4be3e4d0493554ea1a4e07465d66672abcf444ad1656952fb6b2e1796"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.209140 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"65145612-cd78-4a1f-84c3-ea831e0c83b0","Type":"ContainerStarted","Data":"c2d1e9cc609a5f9e5bc9cb2334e2ba4f008bdd2e7f8c98d84c13951dec0cb90a"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.221348 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbznl" event={"ID":"a2745c65-9c13-46fa-b3e6-37731ad17208","Type":"ContainerStarted","Data":"ebf1685f38ab1069525259880a769e8a4dc47f1c3f8dfaad0790987a34b52c4e"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.227684 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"daa6f578-ea44-4555-be0c-e2b8662386f0","Type":"ContainerStarted","Data":"11dd1ccedcc3c90292d59b1afb9df5be0084b3306034a93158a31a019dca6aa6"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.228674 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fb658dc65-qf22m"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.230545 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6553a386-d57f-483f-98ed-99ba90b035c6-config\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.230634 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/host-path/6553a386-d57f-483f-98ed-99ba90b035c6-ovn-rundir\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.230667 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6553a386-d57f-483f-98ed-99ba90b035c6-combined-ca-bundle\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.230729 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6553a386-d57f-483f-98ed-99ba90b035c6-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.230759 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7q9c\" (UniqueName: \"kubernetes.io/projected/6553a386-d57f-483f-98ed-99ba90b035c6-kube-api-access-g7q9c\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.230790 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/6553a386-d57f-483f-98ed-99ba90b035c6-ovs-rundir\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.231481 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/6553a386-d57f-483f-98ed-99ba90b035c6-ovs-rundir\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.233105 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6553a386-d57f-483f-98ed-99ba90b035c6-config\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.233328 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/6553a386-d57f-483f-98ed-99ba90b035c6-ovn-rundir\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.234330 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" event={"ID":"39c36338-59ac-4873-9218-f99df307efc3","Type":"ContainerDied","Data":"4bd272758eb1173f9c071e4307da3b8ae38d9e47a4ceea5913b5c63ae0936435"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.234724 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7687d5c5c-92rcn" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.235649 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6553a386-d57f-483f-98ed-99ba90b035c6-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.244938 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6553a386-d57f-483f-98ed-99ba90b035c6-combined-ca-bundle\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.250698 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7q9c\" (UniqueName: \"kubernetes.io/projected/6553a386-d57f-483f-98ed-99ba90b035c6-kube-api-access-g7q9c\") pod \"ovn-controller-metrics-l5mxb\" (UID: \"6553a386-d57f-483f-98ed-99ba90b035c6\") " pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.259497 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"779f829e-6240-47a5-8d8d-9e279d316df7","Type":"ContainerStarted","Data":"10c37261f0ce9eebaf30a61f3b0ec30a08c394a5065f89fbb65c2fa6f3efadcd"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.277755 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf","Type":"ContainerStarted","Data":"25ce4773f0f2ac68fca01c612505f30c79fc1e73556668ff1c330ccfff57ba47"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.285374 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fb658dc65-qf22m"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.305407 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7687d5c5c-92rcn"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.309308 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7687d5c5c-92rcn"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.310643 4779 generic.go:334] "Generic (PLEG): container finished" podID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" containerID="c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9" exitCode=0 Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.310827 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" event={"ID":"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f","Type":"ContainerDied","Data":"c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.310877 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" event={"ID":"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f","Type":"ContainerStarted","Data":"d29f065c7f9ff8ba6605e179afaf748c06766a0dbcb2a7536a04623bd059fe0a"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.313874 4779 generic.go:334] "Generic (PLEG): container finished" podID="53d6662d-fb9b-4a71-84e7-13716b4a18b1" containerID="e8e6b93962dc20a0a0f533612ae462ad71879f06a1305aa870603c71ad98bd6b" exitCode=0 Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.313944 
4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" event={"ID":"53d6662d-fb9b-4a71-84e7-13716b4a18b1","Type":"ContainerDied","Data":"e8e6b93962dc20a0a0f533612ae462ad71879f06a1305aa870603c71ad98bd6b"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.313966 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" event={"ID":"53d6662d-fb9b-4a71-84e7-13716b4a18b1","Type":"ContainerStarted","Data":"3eed1c4cb980f68b2bdc375b10b2e87455afbc474a8409b8b5c9e99596922106"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.314997 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b7756dccc-dk8xp"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.326656 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-dzjsq" event={"ID":"290bdf85-850a-4b79-85f7-dc2e662e0ae9","Type":"ContainerStarted","Data":"68b3d20aeae603d3f7f1dded7f1836f832aa1237360b0130e1ca2a3625f0a423"} Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.355818 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-564fc564d5-g4c8f"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.358215 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.362275 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.409582 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-l5mxb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.428377 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-564fc564d5-g4c8f"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.435750 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-ovsdbserver-nb\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.435949 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvf5m\" (UniqueName: \"kubernetes.io/projected/503e48ea-44a0-4f04-be6f-4ce12d42b42b-kube-api-access-tvf5m\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.436234 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-config\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.437047 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-dns-svc\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 
09:45:19.539474 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-dns-svc\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.539801 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-ovsdbserver-nb\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.539852 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvf5m\" (UniqueName: \"kubernetes.io/projected/503e48ea-44a0-4f04-be6f-4ce12d42b42b-kube-api-access-tvf5m\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.539886 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-config\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.541878 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-config\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.543961 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-dns-svc\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.544721 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-ovsdbserver-nb\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.584961 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9d9d94d6f-fkg46"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.610026 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvf5m\" (UniqueName: \"kubernetes.io/projected/503e48ea-44a0-4f04-be6f-4ce12d42b42b-kube-api-access-tvf5m\") pod \"dnsmasq-dns-564fc564d5-g4c8f\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.625077 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-676856544c-dqj7w"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.626427 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.632012 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.652972 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-676856544c-dqj7w"] Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.703021 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.742222 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-config\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.742259 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-dns-svc\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.742295 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q655t\" (UniqueName: \"kubernetes.io/projected/9f5b067c-e255-4746-abd6-88e8ec9d8a85-kube-api-access-q655t\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.742370 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-sb\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.742540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-nb\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.844349 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-dns-svc\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.844428 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q655t\" (UniqueName: \"kubernetes.io/projected/9f5b067c-e255-4746-abd6-88e8ec9d8a85-kube-api-access-q655t\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.844474 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-sb\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.844568 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-nb\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.844630 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-config\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.845318 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-config\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.845534 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-dns-svc\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.846178 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-sb\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.846193 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-nb\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.869776 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q655t\" (UniqueName: \"kubernetes.io/projected/9f5b067c-e255-4746-abd6-88e8ec9d8a85-kube-api-access-q655t\") pod \"dnsmasq-dns-676856544c-dqj7w\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.962037 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:19 crc kubenswrapper[4779]: I0929 09:45:19.972831 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.055599 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmcpr\" (UniqueName: \"kubernetes.io/projected/53d6662d-fb9b-4a71-84e7-13716b4a18b1-kube-api-access-bmcpr\") pod \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.055769 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-config\") pod \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.055865 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-dns-svc\") pod \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\" (UID: \"53d6662d-fb9b-4a71-84e7-13716b4a18b1\") " Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.060008 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53d6662d-fb9b-4a71-84e7-13716b4a18b1-kube-api-access-bmcpr" (OuterVolumeSpecName: "kube-api-access-bmcpr") pod "53d6662d-fb9b-4a71-84e7-13716b4a18b1" (UID: "53d6662d-fb9b-4a71-84e7-13716b4a18b1"). InnerVolumeSpecName "kube-api-access-bmcpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.075563 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-config" (OuterVolumeSpecName: "config") pod "53d6662d-fb9b-4a71-84e7-13716b4a18b1" (UID: "53d6662d-fb9b-4a71-84e7-13716b4a18b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.093939 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "53d6662d-fb9b-4a71-84e7-13716b4a18b1" (UID: "53d6662d-fb9b-4a71-84e7-13716b4a18b1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.151486 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-l5mxb"] Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.168409 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmcpr\" (UniqueName: \"kubernetes.io/projected/53d6662d-fb9b-4a71-84e7-13716b4a18b1-kube-api-access-bmcpr\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.168438 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.168447 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53d6662d-fb9b-4a71-84e7-13716b4a18b1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.350082 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" event={"ID":"53d6662d-fb9b-4a71-84e7-13716b4a18b1","Type":"ContainerDied","Data":"3eed1c4cb980f68b2bdc375b10b2e87455afbc474a8409b8b5c9e99596922106"} Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.350565 4779 scope.go:117] "RemoveContainer" containerID="e8e6b93962dc20a0a0f533612ae462ad71879f06a1305aa870603c71ad98bd6b" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.351122 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b8b7cfc7-dnc8g" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.370021 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" event={"ID":"71e1e56c-3b30-4233-a2df-3efe864df26a","Type":"ContainerStarted","Data":"78338f49880ab2c4faf4dd9bcc6507a2ccab7723460b0406f281ab4b61822ccc"} Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.370160 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" podUID="71e1e56c-3b30-4233-a2df-3efe864df26a" containerName="dnsmasq-dns" containerID="cri-o://78338f49880ab2c4faf4dd9bcc6507a2ccab7723460b0406f281ab4b61822ccc" gracePeriod=10 Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.370412 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.393775 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" podStartSLOduration=16.257179805 podStartE2EDuration="16.393739228s" podCreationTimestamp="2025-09-29 09:45:04 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.078519852 +0000 UTC m=+950.059843756" lastFinishedPulling="2025-09-29 09:45:18.215079275 +0000 UTC m=+950.196403179" observedRunningTime="2025-09-29 09:45:20.389796962 +0000 UTC m=+952.371120876" watchObservedRunningTime="2025-09-29 09:45:20.393739228 +0000 UTC m=+952.375063142" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.446488 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b8b7cfc7-dnc8g"] Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.463684 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b8b7cfc7-dnc8g"] Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.726932 4779 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2edf572a-5bab-41c3-8ff8-a668fb8a5893" path="/var/lib/kubelet/pods/2edf572a-5bab-41c3-8ff8-a668fb8a5893/volumes" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.727273 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39c36338-59ac-4873-9218-f99df307efc3" path="/var/lib/kubelet/pods/39c36338-59ac-4873-9218-f99df307efc3/volumes" Sep 29 09:45:20 crc kubenswrapper[4779]: I0929 09:45:20.727636 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53d6662d-fb9b-4a71-84e7-13716b4a18b1" path="/var/lib/kubelet/pods/53d6662d-fb9b-4a71-84e7-13716b4a18b1/volumes" Sep 29 09:45:21 crc kubenswrapper[4779]: I0929 09:45:21.383468 4779 generic.go:334] "Generic (PLEG): container finished" podID="71e1e56c-3b30-4233-a2df-3efe864df26a" containerID="78338f49880ab2c4faf4dd9bcc6507a2ccab7723460b0406f281ab4b61822ccc" exitCode=0 Sep 29 09:45:21 crc kubenswrapper[4779]: I0929 09:45:21.383806 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" event={"ID":"71e1e56c-3b30-4233-a2df-3efe864df26a","Type":"ContainerDied","Data":"78338f49880ab2c4faf4dd9bcc6507a2ccab7723460b0406f281ab4b61822ccc"} Sep 29 09:45:21 crc kubenswrapper[4779]: I0929 09:45:21.385549 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-l5mxb" event={"ID":"6553a386-d57f-483f-98ed-99ba90b035c6","Type":"ContainerStarted","Data":"dd78dd75407dfc6538d13c4ca1416cc37f702cbc6787ae7674eef139b3dc979b"} Sep 29 09:45:21 crc kubenswrapper[4779]: I0929 09:45:21.423682 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-564fc564d5-g4c8f"] Sep 29 09:45:21 crc kubenswrapper[4779]: I0929 09:45:21.887811 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.000365 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-dns-svc\") pod \"71e1e56c-3b30-4233-a2df-3efe864df26a\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.000492 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnc2r\" (UniqueName: \"kubernetes.io/projected/71e1e56c-3b30-4233-a2df-3efe864df26a-kube-api-access-nnc2r\") pod \"71e1e56c-3b30-4233-a2df-3efe864df26a\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.000530 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-config\") pod \"71e1e56c-3b30-4233-a2df-3efe864df26a\" (UID: \"71e1e56c-3b30-4233-a2df-3efe864df26a\") " Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.011060 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71e1e56c-3b30-4233-a2df-3efe864df26a-kube-api-access-nnc2r" (OuterVolumeSpecName: "kube-api-access-nnc2r") pod "71e1e56c-3b30-4233-a2df-3efe864df26a" (UID: "71e1e56c-3b30-4233-a2df-3efe864df26a"). InnerVolumeSpecName "kube-api-access-nnc2r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.051607 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-config" (OuterVolumeSpecName: "config") pod "71e1e56c-3b30-4233-a2df-3efe864df26a" (UID: "71e1e56c-3b30-4233-a2df-3efe864df26a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.061524 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "71e1e56c-3b30-4233-a2df-3efe864df26a" (UID: "71e1e56c-3b30-4233-a2df-3efe864df26a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.102332 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnc2r\" (UniqueName: \"kubernetes.io/projected/71e1e56c-3b30-4233-a2df-3efe864df26a-kube-api-access-nnc2r\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.102377 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.102391 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71e1e56c-3b30-4233-a2df-3efe864df26a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.392506 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" event={"ID":"503e48ea-44a0-4f04-be6f-4ce12d42b42b","Type":"ContainerStarted","Data":"b69306b0cea280cb9332535de85efa0a459f525e96234093afebeaff3f1bbc16"} Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.394054 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" event={"ID":"71e1e56c-3b30-4233-a2df-3efe864df26a","Type":"ContainerDied","Data":"a73acc17bd175af7db31307971359b2be25e91f0db3e842761f4c546885a6895"} Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.394085 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9d9d94d6f-fkg46" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.394116 4779 scope.go:117] "RemoveContainer" containerID="78338f49880ab2c4faf4dd9bcc6507a2ccab7723460b0406f281ab4b61822ccc" Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.424605 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9d9d94d6f-fkg46"] Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.429600 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9d9d94d6f-fkg46"] Sep 29 09:45:22 crc kubenswrapper[4779]: I0929 09:45:22.731095 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71e1e56c-3b30-4233-a2df-3efe864df26a" path="/var/lib/kubelet/pods/71e1e56c-3b30-4233-a2df-3efe864df26a/volumes" Sep 29 09:45:26 crc kubenswrapper[4779]: I0929 09:45:26.902509 4779 scope.go:117] "RemoveContainer" containerID="d0ab6c95b00c1150411a69422d676510c3140992f02481cfca8d7e8d0a9ad74b" Sep 29 09:45:28 crc kubenswrapper[4779]: I0929 09:45:28.089903 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-676856544c-dqj7w"] Sep 29 09:45:28 crc kubenswrapper[4779]: W0929 09:45:28.837365 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f5b067c_e255_4746_abd6_88e8ec9d8a85.slice/crio-4d0580a08d25f0167f381a7045060f4531a990dda726f495ea25802d3709c80d WatchSource:0}: Error finding container 4d0580a08d25f0167f381a7045060f4531a990dda726f495ea25802d3709c80d: Status 404 returned error can't find the container with id 4d0580a08d25f0167f381a7045060f4531a990dda726f495ea25802d3709c80d Sep 29 09:45:29 crc kubenswrapper[4779]: I0929 09:45:29.446435 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676856544c-dqj7w" event={"ID":"9f5b067c-e255-4746-abd6-88e8ec9d8a85","Type":"ContainerStarted","Data":"4d0580a08d25f0167f381a7045060f4531a990dda726f495ea25802d3709c80d"} Sep 29 09:45:29 crc kubenswrapper[4779]: I0929 09:45:29.452289 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" event={"ID":"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f","Type":"ContainerStarted","Data":"6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52"} Sep 29 09:45:29 crc kubenswrapper[4779]: I0929 09:45:29.452442 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" podUID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" containerName="dnsmasq-dns" containerID="cri-o://6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52" gracePeriod=10 Sep 29 09:45:29 crc kubenswrapper[4779]: I0929 09:45:29.452480 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:29 crc kubenswrapper[4779]: I0929 09:45:29.476383 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" podStartSLOduration=26.476365286 podStartE2EDuration="26.476365286s" podCreationTimestamp="2025-09-29 09:45:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:45:29.46902012 +0000 UTC m=+961.450344024" watchObservedRunningTime="2025-09-29 09:45:29.476365286 +0000 UTC m=+961.457689190" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.424264 4779 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.478477 4779 generic.go:334] "Generic (PLEG): container finished" podID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" containerID="6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52" exitCode=0 Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.478540 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" event={"ID":"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f","Type":"ContainerDied","Data":"6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52"} Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.478568 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" event={"ID":"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f","Type":"ContainerDied","Data":"d29f065c7f9ff8ba6605e179afaf748c06766a0dbcb2a7536a04623bd059fe0a"} Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.478590 4779 scope.go:117] "RemoveContainer" containerID="6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.478702 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7756dccc-dk8xp" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.481356 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"daa6f578-ea44-4555-be0c-e2b8662386f0","Type":"ContainerStarted","Data":"331f0e0f0bd4e4a5bf2f8d1dce8432cf666be90233449ac92419a403e10a74a8"} Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.485135 4779 generic.go:334] "Generic (PLEG): container finished" podID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" containerID="d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7" exitCode=0 Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.485181 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" event={"ID":"503e48ea-44a0-4f04-be6f-4ce12d42b42b","Type":"ContainerDied","Data":"d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7"} Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.523159 4779 scope.go:117] "RemoveContainer" containerID="c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.546472 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzfhv\" (UniqueName: \"kubernetes.io/projected/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-kube-api-access-nzfhv\") pod \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.546519 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-config\") pod \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.546587 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-dns-svc\") pod \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\" (UID: \"1d2e7b20-7b93-45fb-a5c0-4ab547aa036f\") " Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.562343 4779 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/projected/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-kube-api-access-nzfhv" (OuterVolumeSpecName: "kube-api-access-nzfhv") pod "1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" (UID: "1d2e7b20-7b93-45fb-a5c0-4ab547aa036f"). InnerVolumeSpecName "kube-api-access-nzfhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.578548 4779 scope.go:117] "RemoveContainer" containerID="6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52" Sep 29 09:45:30 crc kubenswrapper[4779]: E0929 09:45:30.580645 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52\": container with ID starting with 6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52 not found: ID does not exist" containerID="6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.580687 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52"} err="failed to get container status \"6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52\": rpc error: code = NotFound desc = could not find container \"6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52\": container with ID starting with 6e82ee44a5c64f897584e8bd93a90e76b26c4d8ce21b3a0a80fe577e215cce52 not found: ID does not exist" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.580712 4779 scope.go:117] "RemoveContainer" containerID="c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9" Sep 29 09:45:30 crc kubenswrapper[4779]: E0929 09:45:30.581109 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9\": container with ID starting with c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9 not found: ID does not exist" containerID="c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.581131 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9"} err="failed to get container status \"c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9\": rpc error: code = NotFound desc = could not find container \"c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9\": container with ID starting with c3bc6467ec6e10738d188e16721138de51cc5e9d5fd778e6b3a66714b033ede9 not found: ID does not exist" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.649010 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzfhv\" (UniqueName: \"kubernetes.io/projected/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-kube-api-access-nzfhv\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:30 crc kubenswrapper[4779]: I0929 09:45:30.980705 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-config" (OuterVolumeSpecName: "config") pod "1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" (UID: "1d2e7b20-7b93-45fb-a5c0-4ab547aa036f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.010010 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" (UID: "1d2e7b20-7b93-45fb-a5c0-4ab547aa036f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.059006 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.059374 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.127773 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b7756dccc-dk8xp"] Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.134330 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b7756dccc-dk8xp"] Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.493620 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8faade2a-9a07-45b9-99e4-b448b64afaaa","Type":"ContainerStarted","Data":"d06451a6c3888fe27bb17e40eff8efe95ff1aed4e982b6c94a8c786dad8579f6"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.496105 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbznl" event={"ID":"a2745c65-9c13-46fa-b3e6-37731ad17208","Type":"ContainerStarted","Data":"a6a4ec9e764edf1446dc478a0713a2a544f289e3d23eee24de034e3ffad72152"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.499641 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" event={"ID":"503e48ea-44a0-4f04-be6f-4ce12d42b42b","Type":"ContainerStarted","Data":"75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.499710 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.501466 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"75fdeaf0-3dbf-4249-b03d-8b59289a2d58","Type":"ContainerStarted","Data":"3e2d494ebfb0b892b126c480a083ee36d0afa548bbbe17ca5cfc3f070a54b418"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.501653 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.503299 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"eb6a2725-2f24-4e1f-9791-d544f59eddeb","Type":"ContainerStarted","Data":"a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.503413 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.505706 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-dzjsq" 
event={"ID":"290bdf85-850a-4b79-85f7-dc2e662e0ae9","Type":"ContainerStarted","Data":"acb4bf187d5d5469c3a75138abd9cc801ed5f1eb0f44ede73a6819589c7d4619"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.505869 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-dzjsq" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.507600 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"779f829e-6240-47a5-8d8d-9e279d316df7","Type":"ContainerStarted","Data":"02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.509727 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0","Type":"ContainerStarted","Data":"c7230301b3a10d84801f9d954e1f107d23a1cbc05f83deafdfa6aa40d987f5e6"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.511844 4779 generic.go:334] "Generic (PLEG): container finished" podID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerID="d561d8a39157be803a0cc8a06b5cd5f4f8755b1030549b63d1872bbb397f536b" exitCode=0 Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.512025 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676856544c-dqj7w" event={"ID":"9f5b067c-e255-4746-abd6-88e8ec9d8a85","Type":"ContainerDied","Data":"d561d8a39157be803a0cc8a06b5cd5f4f8755b1030549b63d1872bbb397f536b"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.514310 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-l5mxb" event={"ID":"6553a386-d57f-483f-98ed-99ba90b035c6","Type":"ContainerStarted","Data":"456726f2ae031b221547b5d2447a94157fb6ad988fe0f320f4677c1d0aa5dc3c"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.525304 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"65145612-cd78-4a1f-84c3-ea831e0c83b0","Type":"ContainerStarted","Data":"efaca20a23c4660c4c77db59776a2aa8b62e8c32064ef3978e27aa2a77937821"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.527643 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d00ff60f-2316-40a6-a874-c7f4e6506a48","Type":"ContainerStarted","Data":"3cb6e1a796dd2479d32d30fd75e9e8db2af7baa042ae1ad549d5a8113c6d1e9e"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.527696 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d00ff60f-2316-40a6-a874-c7f4e6506a48","Type":"ContainerStarted","Data":"7f43133333d45d9dac79b3ff74c402191b5e66d10fa9b87e394d09250c2abcdf"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.529129 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf","Type":"ContainerStarted","Data":"44630d9f8ed818223d641973d2ca080695e7400ac961712819ae67e9a1b196de"} Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.577204 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-dzjsq" podStartSLOduration=7.904405327 podStartE2EDuration="18.57718685s" podCreationTimestamp="2025-09-29 09:45:13 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.483752162 +0000 UTC m=+950.465076066" lastFinishedPulling="2025-09-29 09:45:29.156533685 +0000 UTC m=+961.137857589" observedRunningTime="2025-09-29 09:45:31.574771939 +0000 UTC 
m=+963.556095843" watchObservedRunningTime="2025-09-29 09:45:31.57718685 +0000 UTC m=+963.558510754" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.593461 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" podStartSLOduration=12.593446028 podStartE2EDuration="12.593446028s" podCreationTimestamp="2025-09-29 09:45:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:45:31.588849823 +0000 UTC m=+963.570173727" watchObservedRunningTime="2025-09-29 09:45:31.593446028 +0000 UTC m=+963.574769932" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.631797 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-l5mxb" podStartSLOduration=4.054653235 podStartE2EDuration="12.631775945s" podCreationTimestamp="2025-09-29 09:45:19 +0000 UTC" firstStartedPulling="2025-09-29 09:45:20.89258716 +0000 UTC m=+952.873911064" lastFinishedPulling="2025-09-29 09:45:29.46970987 +0000 UTC m=+961.451033774" observedRunningTime="2025-09-29 09:45:31.600558457 +0000 UTC m=+963.581882361" watchObservedRunningTime="2025-09-29 09:45:31.631775945 +0000 UTC m=+963.613099849" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.723865 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=14.163231305 podStartE2EDuration="23.723848971s" podCreationTimestamp="2025-09-29 09:45:08 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.099053405 +0000 UTC m=+950.080377309" lastFinishedPulling="2025-09-29 09:45:27.659671061 +0000 UTC m=+959.640994975" observedRunningTime="2025-09-29 09:45:31.673526822 +0000 UTC m=+963.654850736" watchObservedRunningTime="2025-09-29 09:45:31.723848971 +0000 UTC m=+963.705172875" Sep 29 09:45:31 crc kubenswrapper[4779]: I0929 09:45:31.726105 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.918706506 podStartE2EDuration="21.726095037s" podCreationTimestamp="2025-09-29 09:45:10 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.450247607 +0000 UTC m=+950.431571501" lastFinishedPulling="2025-09-29 09:45:30.257636128 +0000 UTC m=+962.238960032" observedRunningTime="2025-09-29 09:45:31.686284077 +0000 UTC m=+963.667607971" watchObservedRunningTime="2025-09-29 09:45:31.726095037 +0000 UTC m=+963.707418931" Sep 29 09:45:32 crc kubenswrapper[4779]: I0929 09:45:32.557881 4779 generic.go:334] "Generic (PLEG): container finished" podID="a2745c65-9c13-46fa-b3e6-37731ad17208" containerID="a6a4ec9e764edf1446dc478a0713a2a544f289e3d23eee24de034e3ffad72152" exitCode=0 Sep 29 09:45:32 crc kubenswrapper[4779]: I0929 09:45:32.560454 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbznl" event={"ID":"a2745c65-9c13-46fa-b3e6-37731ad17208","Type":"ContainerDied","Data":"a6a4ec9e764edf1446dc478a0713a2a544f289e3d23eee24de034e3ffad72152"} Sep 29 09:45:32 crc kubenswrapper[4779]: I0929 09:45:32.624511 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=6.478398538 podStartE2EDuration="16.624472001s" podCreationTimestamp="2025-09-29 09:45:16 +0000 UTC" firstStartedPulling="2025-09-29 09:45:19.014081909 +0000 UTC m=+950.995405813" lastFinishedPulling="2025-09-29 09:45:29.160155372 +0000 UTC m=+961.141479276" 
observedRunningTime="2025-09-29 09:45:32.622803292 +0000 UTC m=+964.604127216" watchObservedRunningTime="2025-09-29 09:45:32.624472001 +0000 UTC m=+964.605795905" Sep 29 09:45:32 crc kubenswrapper[4779]: I0929 09:45:32.726361 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" path="/var/lib/kubelet/pods/1d2e7b20-7b93-45fb-a5c0-4ab547aa036f/volumes" Sep 29 09:45:33 crc kubenswrapper[4779]: I0929 09:45:33.261192 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:33 crc kubenswrapper[4779]: I0929 09:45:33.261237 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:33 crc kubenswrapper[4779]: I0929 09:45:33.300423 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:35 crc kubenswrapper[4779]: I0929 09:45:35.586923 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"65145612-cd78-4a1f-84c3-ea831e0c83b0","Type":"ContainerStarted","Data":"5ba55adbfd72cfd867630618427387ca72de9d33be885c6742d1a857aa8983d0"} Sep 29 09:45:36 crc kubenswrapper[4779]: I0929 09:45:36.616405 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbznl" event={"ID":"a2745c65-9c13-46fa-b3e6-37731ad17208","Type":"ContainerStarted","Data":"0384d489702743d1833645717c5dd3f45e4c8ac38509027efeb4f1205c1873df"} Sep 29 09:45:36 crc kubenswrapper[4779]: I0929 09:45:36.619003 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerStarted","Data":"835a58cf4237846af5fe498031d03b152802d1e9a85697ab90dfb70b61219995"} Sep 29 09:45:36 crc kubenswrapper[4779]: I0929 09:45:36.624606 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676856544c-dqj7w" event={"ID":"9f5b067c-e255-4746-abd6-88e8ec9d8a85","Type":"ContainerStarted","Data":"d81578ca79f4ece4b43a97ad32ad5185b8172d4d56bb7e9fbf278ab74214d0b6"} Sep 29 09:45:36 crc kubenswrapper[4779]: I0929 09:45:36.624638 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:36 crc kubenswrapper[4779]: I0929 09:45:36.733572 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-676856544c-dqj7w" podStartSLOduration=17.733556271 podStartE2EDuration="17.733556271s" podCreationTimestamp="2025-09-29 09:45:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:45:36.683560272 +0000 UTC m=+968.664884206" watchObservedRunningTime="2025-09-29 09:45:36.733556271 +0000 UTC m=+968.714880175" Sep 29 09:45:36 crc kubenswrapper[4779]: I0929 09:45:36.736122 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=13.427101294 podStartE2EDuration="23.736114796s" podCreationTimestamp="2025-09-29 09:45:13 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.850156981 +0000 UTC m=+950.831480885" lastFinishedPulling="2025-09-29 09:45:29.159170463 +0000 UTC m=+961.140494387" observedRunningTime="2025-09-29 09:45:36.704717973 +0000 UTC m=+968.686041877" watchObservedRunningTime="2025-09-29 09:45:36.736114796 +0000 UTC m=+968.717438700" Sep 29 09:45:37 crc 
kubenswrapper[4779]: I0929 09:45:37.634167 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbznl" event={"ID":"a2745c65-9c13-46fa-b3e6-37731ad17208","Type":"ContainerStarted","Data":"224a9c1b9cd4d5de948ae9393e0e9c236c2c0cabc5ed87491bd20ea9f7037c61"} Sep 29 09:45:37 crc kubenswrapper[4779]: I0929 09:45:37.671075 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-bbznl" podStartSLOduration=14.125596553 podStartE2EDuration="24.671056325s" podCreationTimestamp="2025-09-29 09:45:13 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.264111446 +0000 UTC m=+950.245435350" lastFinishedPulling="2025-09-29 09:45:28.809571218 +0000 UTC m=+960.790895122" observedRunningTime="2025-09-29 09:45:37.659427433 +0000 UTC m=+969.640751347" watchObservedRunningTime="2025-09-29 09:45:37.671056325 +0000 UTC m=+969.652380229" Sep 29 09:45:38 crc kubenswrapper[4779]: I0929 09:45:38.305694 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 09:45:38 crc kubenswrapper[4779]: I0929 09:45:38.639741 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:38 crc kubenswrapper[4779]: I0929 09:45:38.639779 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bbznl" Sep 29 09:45:38 crc kubenswrapper[4779]: I0929 09:45:38.786763 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:38 crc kubenswrapper[4779]: I0929 09:45:38.824581 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:38 crc kubenswrapper[4779]: I0929 09:45:38.857800 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.646320 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.690555 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.705596 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.965405 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 29 09:45:39 crc kubenswrapper[4779]: E0929 09:45:39.965867 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71e1e56c-3b30-4233-a2df-3efe864df26a" containerName="dnsmasq-dns" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.965892 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="71e1e56c-3b30-4233-a2df-3efe864df26a" containerName="dnsmasq-dns" Sep 29 09:45:39 crc kubenswrapper[4779]: E0929 09:45:39.965936 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71e1e56c-3b30-4233-a2df-3efe864df26a" containerName="init" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.965947 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="71e1e56c-3b30-4233-a2df-3efe864df26a" containerName="init" Sep 29 09:45:39 crc kubenswrapper[4779]: E0929 09:45:39.965973 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" 
containerName="dnsmasq-dns" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.965981 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" containerName="dnsmasq-dns" Sep 29 09:45:39 crc kubenswrapper[4779]: E0929 09:45:39.965995 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" containerName="init" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.966002 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" containerName="init" Sep 29 09:45:39 crc kubenswrapper[4779]: E0929 09:45:39.966015 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d6662d-fb9b-4a71-84e7-13716b4a18b1" containerName="init" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.966022 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d6662d-fb9b-4a71-84e7-13716b4a18b1" containerName="init" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.966239 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d6662d-fb9b-4a71-84e7-13716b4a18b1" containerName="init" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.966255 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d2e7b20-7b93-45fb-a5c0-4ab547aa036f" containerName="dnsmasq-dns" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.966266 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="71e1e56c-3b30-4233-a2df-3efe864df26a" containerName="dnsmasq-dns" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.967353 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.971103 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.974992 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.975196 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-jrzc5" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.975321 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 29 09:45:39 crc kubenswrapper[4779]: I0929 09:45:39.975818 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.059196 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-scripts\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.059271 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.059310 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.059335 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zglns\" (UniqueName: \"kubernetes.io/projected/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-kube-api-access-zglns\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.059540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.059655 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-config\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.059887 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.162499 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.162631 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.162668 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zglns\" (UniqueName: \"kubernetes.io/projected/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-kube-api-access-zglns\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.162731 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.162772 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-config\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.162812 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.162990 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-scripts\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.163585 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.163964 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-config\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.164779 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-scripts\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.169276 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.171079 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.171213 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.179797 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zglns\" (UniqueName: \"kubernetes.io/projected/1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a-kube-api-access-zglns\") pod \"ovn-northd-0\" (UID: \"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a\") " pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.294716 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.657819 4779 generic.go:334] "Generic (PLEG): container finished" podID="bb5f88d2-6663-4ed6-a7a7-93ee500c9edf" containerID="44630d9f8ed818223d641973d2ca080695e7400ac961712819ae67e9a1b196de" exitCode=0 Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.657934 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf","Type":"ContainerDied","Data":"44630d9f8ed818223d641973d2ca080695e7400ac961712819ae67e9a1b196de"} Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.660488 4779 generic.go:334] "Generic (PLEG): container finished" podID="00e4a196-0951-4c3a-9a1e-65e24cf2e6a0" containerID="c7230301b3a10d84801f9d954e1f107d23a1cbc05f83deafdfa6aa40d987f5e6" exitCode=0 Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.660542 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0","Type":"ContainerDied","Data":"c7230301b3a10d84801f9d954e1f107d23a1cbc05f83deafdfa6aa40d987f5e6"} Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.679211 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 09:45:40 crc kubenswrapper[4779]: I0929 09:45:40.809102 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 09:45:40 crc kubenswrapper[4779]: W0929 09:45:40.817616 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f7e1e01_34a6_4fc9_8c73_597aa6dc5a4a.slice/crio-bff462dfa5c5907b7a29f540bb9936799f7bd93c53d7566ec1368add21f1d836 WatchSource:0}: Error finding container bff462dfa5c5907b7a29f540bb9936799f7bd93c53d7566ec1368add21f1d836: Status 404 returned error can't find the container with id bff462dfa5c5907b7a29f540bb9936799f7bd93c53d7566ec1368add21f1d836 Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.678310 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bb5f88d2-6663-4ed6-a7a7-93ee500c9edf","Type":"ContainerStarted","Data":"4bfc2c0b89d238747291d0c3b67bccc9b126bb9afe120693d340d40654816295"} Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.681542 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00e4a196-0951-4c3a-9a1e-65e24cf2e6a0","Type":"ContainerStarted","Data":"76cb01a1a47ff518fa0ec480deb24ce29d05c95c0a0f1d521ba8f296df34d41e"} Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.684847 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a","Type":"ContainerStarted","Data":"4e9b95e15b19942ce0e3c407a59bcfb7310ecd96c15a7021531f812ff8d894e4"} Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.684886 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.684896 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a","Type":"ContainerStarted","Data":"3b068655255252e14ddd99edb6210fedfbdfa721d2ccc2d7883b764c6d171a73"} Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.684918 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-northd-0" event={"ID":"1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a","Type":"ContainerStarted","Data":"bff462dfa5c5907b7a29f540bb9936799f7bd93c53d7566ec1368add21f1d836"} Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.703803 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=24.578826268 podStartE2EDuration="35.703785471s" podCreationTimestamp="2025-09-29 09:45:06 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.137174206 +0000 UTC m=+950.118498110" lastFinishedPulling="2025-09-29 09:45:29.262133409 +0000 UTC m=+961.243457313" observedRunningTime="2025-09-29 09:45:41.695960811 +0000 UTC m=+973.677284715" watchObservedRunningTime="2025-09-29 09:45:41.703785471 +0000 UTC m=+973.685109375" Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.715447 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=23.414866468 podStartE2EDuration="34.715429093s" podCreationTimestamp="2025-09-29 09:45:07 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.096400157 +0000 UTC m=+950.077724071" lastFinishedPulling="2025-09-29 09:45:29.396962792 +0000 UTC m=+961.378286696" observedRunningTime="2025-09-29 09:45:41.713408014 +0000 UTC m=+973.694731928" watchObservedRunningTime="2025-09-29 09:45:41.715429093 +0000 UTC m=+973.696753007" Sep 29 09:45:41 crc kubenswrapper[4779]: I0929 09:45:41.738057 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.2946435259999998 podStartE2EDuration="2.738028907s" podCreationTimestamp="2025-09-29 09:45:39 +0000 UTC" firstStartedPulling="2025-09-29 09:45:40.82219318 +0000 UTC m=+972.803517084" lastFinishedPulling="2025-09-29 09:45:41.265578571 +0000 UTC m=+973.246902465" observedRunningTime="2025-09-29 09:45:41.734016329 +0000 UTC m=+973.715340253" watchObservedRunningTime="2025-09-29 09:45:41.738028907 +0000 UTC m=+973.719352821" Sep 29 09:45:42 crc kubenswrapper[4779]: I0929 09:45:42.691669 4779 generic.go:334] "Generic (PLEG): container finished" podID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerID="835a58cf4237846af5fe498031d03b152802d1e9a85697ab90dfb70b61219995" exitCode=0 Sep 29 09:45:42 crc kubenswrapper[4779]: I0929 09:45:42.691776 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerDied","Data":"835a58cf4237846af5fe498031d03b152802d1e9a85697ab90dfb70b61219995"} Sep 29 09:45:44 crc kubenswrapper[4779]: I0929 09:45:44.974837 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.023550 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-564fc564d5-g4c8f"] Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.023818 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" podUID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" containerName="dnsmasq-dns" containerID="cri-o://75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e" gracePeriod=10 Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.537408 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.665513 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-ovsdbserver-nb\") pod \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.665608 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-dns-svc\") pod \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.665664 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvf5m\" (UniqueName: \"kubernetes.io/projected/503e48ea-44a0-4f04-be6f-4ce12d42b42b-kube-api-access-tvf5m\") pod \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.665719 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-config\") pod \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\" (UID: \"503e48ea-44a0-4f04-be6f-4ce12d42b42b\") " Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.676080 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/503e48ea-44a0-4f04-be6f-4ce12d42b42b-kube-api-access-tvf5m" (OuterVolumeSpecName: "kube-api-access-tvf5m") pod "503e48ea-44a0-4f04-be6f-4ce12d42b42b" (UID: "503e48ea-44a0-4f04-be6f-4ce12d42b42b"). InnerVolumeSpecName "kube-api-access-tvf5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.712979 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-config" (OuterVolumeSpecName: "config") pod "503e48ea-44a0-4f04-be6f-4ce12d42b42b" (UID: "503e48ea-44a0-4f04-be6f-4ce12d42b42b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.714524 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "503e48ea-44a0-4f04-be6f-4ce12d42b42b" (UID: "503e48ea-44a0-4f04-be6f-4ce12d42b42b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.715625 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "503e48ea-44a0-4f04-be6f-4ce12d42b42b" (UID: "503e48ea-44a0-4f04-be6f-4ce12d42b42b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.716676 4779 generic.go:334] "Generic (PLEG): container finished" podID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" containerID="75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e" exitCode=0 Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.716711 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" event={"ID":"503e48ea-44a0-4f04-be6f-4ce12d42b42b","Type":"ContainerDied","Data":"75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e"} Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.716729 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" event={"ID":"503e48ea-44a0-4f04-be6f-4ce12d42b42b","Type":"ContainerDied","Data":"b69306b0cea280cb9332535de85efa0a459f525e96234093afebeaff3f1bbc16"} Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.716747 4779 scope.go:117] "RemoveContainer" containerID="75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.716829 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-564fc564d5-g4c8f" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.769947 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.770013 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvf5m\" (UniqueName: \"kubernetes.io/projected/503e48ea-44a0-4f04-be6f-4ce12d42b42b-kube-api-access-tvf5m\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.770031 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.770041 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/503e48ea-44a0-4f04-be6f-4ce12d42b42b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.777580 4779 scope.go:117] "RemoveContainer" containerID="d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.785162 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-564fc564d5-g4c8f"] Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.792064 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-564fc564d5-g4c8f"] Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.796944 4779 scope.go:117] "RemoveContainer" containerID="75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e" Sep 29 09:45:45 crc kubenswrapper[4779]: E0929 09:45:45.797430 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e\": container with ID starting with 75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e not found: ID does not exist" containerID="75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e" Sep 29 09:45:45 crc kubenswrapper[4779]: 
I0929 09:45:45.797473 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e"} err="failed to get container status \"75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e\": rpc error: code = NotFound desc = could not find container \"75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e\": container with ID starting with 75c905966281b9171fe174486f5a7a989ac0899faeb23457a0464b6c9b96911e not found: ID does not exist" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.797499 4779 scope.go:117] "RemoveContainer" containerID="d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7" Sep 29 09:45:45 crc kubenswrapper[4779]: E0929 09:45:45.798241 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7\": container with ID starting with d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7 not found: ID does not exist" containerID="d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7" Sep 29 09:45:45 crc kubenswrapper[4779]: I0929 09:45:45.798267 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7"} err="failed to get container status \"d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7\": rpc error: code = NotFound desc = could not find container \"d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7\": container with ID starting with d4de3a0836698c0c5a09218813a0be27e780d4f3fdb91979e05808d164001bf7 not found: ID does not exist" Sep 29 09:45:46 crc kubenswrapper[4779]: I0929 09:45:46.734962 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" path="/var/lib/kubelet/pods/503e48ea-44a0-4f04-be6f-4ce12d42b42b/volumes" Sep 29 09:45:48 crc kubenswrapper[4779]: I0929 09:45:48.287958 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 29 09:45:48 crc kubenswrapper[4779]: I0929 09:45:48.288326 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 29 09:45:48 crc kubenswrapper[4779]: I0929 09:45:48.357032 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 09:45:48 crc kubenswrapper[4779]: I0929 09:45:48.388864 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:48 crc kubenswrapper[4779]: I0929 09:45:48.388943 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:48 crc kubenswrapper[4779]: I0929 09:45:48.753600 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerStarted","Data":"983afb67983de941bc23e57c73e1b3978bc75d1b2cd81623560322c36843d4f5"} Sep 29 09:45:48 crc kubenswrapper[4779]: I0929 09:45:48.837894 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.086068 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-sd786"] Sep 29 
09:45:49 crc kubenswrapper[4779]: E0929 09:45:49.086427 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" containerName="dnsmasq-dns" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.086444 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" containerName="dnsmasq-dns" Sep 29 09:45:49 crc kubenswrapper[4779]: E0929 09:45:49.086467 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" containerName="init" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.086473 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" containerName="init" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.086637 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="503e48ea-44a0-4f04-be6f-4ce12d42b42b" containerName="dnsmasq-dns" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.087287 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sd786" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.100262 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-sd786"] Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.224601 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dd9r\" (UniqueName: \"kubernetes.io/projected/bb3e5b11-382a-4ff0-af29-11a3573ff188-kube-api-access-7dd9r\") pod \"glance-db-create-sd786\" (UID: \"bb3e5b11-382a-4ff0-af29-11a3573ff188\") " pod="openstack/glance-db-create-sd786" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.326157 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dd9r\" (UniqueName: \"kubernetes.io/projected/bb3e5b11-382a-4ff0-af29-11a3573ff188-kube-api-access-7dd9r\") pod \"glance-db-create-sd786\" (UID: \"bb3e5b11-382a-4ff0-af29-11a3573ff188\") " pod="openstack/glance-db-create-sd786" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.343730 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dd9r\" (UniqueName: \"kubernetes.io/projected/bb3e5b11-382a-4ff0-af29-11a3573ff188-kube-api-access-7dd9r\") pod \"glance-db-create-sd786\" (UID: \"bb3e5b11-382a-4ff0-af29-11a3573ff188\") " pod="openstack/glance-db-create-sd786" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.405613 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-sd786" Sep 29 09:45:49 crc kubenswrapper[4779]: I0929 09:45:49.864017 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-sd786"] Sep 29 09:45:49 crc kubenswrapper[4779]: W0929 09:45:49.870018 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb3e5b11_382a_4ff0_af29_11a3573ff188.slice/crio-eb9acef9cff70ebae832c62bdfd549fb57deb12ac383e3bae5e0cd4ac7230ee6 WatchSource:0}: Error finding container eb9acef9cff70ebae832c62bdfd549fb57deb12ac383e3bae5e0cd4ac7230ee6: Status 404 returned error can't find the container with id eb9acef9cff70ebae832c62bdfd549fb57deb12ac383e3bae5e0cd4ac7230ee6 Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.464111 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.568861 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.574235 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-c9jzr"] Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.575836 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-c9jzr" Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.582751 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-c9jzr"] Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.646630 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q59zs\" (UniqueName: \"kubernetes.io/projected/ff694e86-4a85-4102-9aff-8c91a7bb9106-kube-api-access-q59zs\") pod \"watcher-db-create-c9jzr\" (UID: \"ff694e86-4a85-4102-9aff-8c91a7bb9106\") " pod="openstack/watcher-db-create-c9jzr" Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.747940 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q59zs\" (UniqueName: \"kubernetes.io/projected/ff694e86-4a85-4102-9aff-8c91a7bb9106-kube-api-access-q59zs\") pod \"watcher-db-create-c9jzr\" (UID: \"ff694e86-4a85-4102-9aff-8c91a7bb9106\") " pod="openstack/watcher-db-create-c9jzr" Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.771400 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerStarted","Data":"06724ab6700ef6e96d780244b0fac2c116335626451a7b19f3348e2e30349e9d"} Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.772892 4779 generic.go:334] "Generic (PLEG): container finished" podID="bb3e5b11-382a-4ff0-af29-11a3573ff188" containerID="86ba1475aa4cafc80d5808d802fbc60525a1629426a21eed31f1e7f3cb274630" exitCode=0 Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.774080 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sd786" event={"ID":"bb3e5b11-382a-4ff0-af29-11a3573ff188","Type":"ContainerDied","Data":"86ba1475aa4cafc80d5808d802fbc60525a1629426a21eed31f1e7f3cb274630"} Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.774106 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sd786" 
event={"ID":"bb3e5b11-382a-4ff0-af29-11a3573ff188","Type":"ContainerStarted","Data":"eb9acef9cff70ebae832c62bdfd549fb57deb12ac383e3bae5e0cd4ac7230ee6"} Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.780437 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q59zs\" (UniqueName: \"kubernetes.io/projected/ff694e86-4a85-4102-9aff-8c91a7bb9106-kube-api-access-q59zs\") pod \"watcher-db-create-c9jzr\" (UID: \"ff694e86-4a85-4102-9aff-8c91a7bb9106\") " pod="openstack/watcher-db-create-c9jzr" Sep 29 09:45:50 crc kubenswrapper[4779]: I0929 09:45:50.895625 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-c9jzr" Sep 29 09:45:51 crc kubenswrapper[4779]: I0929 09:45:51.311868 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-c9jzr"] Sep 29 09:45:51 crc kubenswrapper[4779]: W0929 09:45:51.315723 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff694e86_4a85_4102_9aff_8c91a7bb9106.slice/crio-b78fe72114f705759dd35586cb75d424b9f00cb5cf9ff1cb82783ce69db63083 WatchSource:0}: Error finding container b78fe72114f705759dd35586cb75d424b9f00cb5cf9ff1cb82783ce69db63083: Status 404 returned error can't find the container with id b78fe72114f705759dd35586cb75d424b9f00cb5cf9ff1cb82783ce69db63083 Sep 29 09:45:51 crc kubenswrapper[4779]: I0929 09:45:51.781932 4779 generic.go:334] "Generic (PLEG): container finished" podID="ff694e86-4a85-4102-9aff-8c91a7bb9106" containerID="9d53d74c76ef90d1be78b9d8eb35bfa96765c67318df2e7b3e506299d03c3cf6" exitCode=0 Sep 29 09:45:51 crc kubenswrapper[4779]: I0929 09:45:51.782000 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-c9jzr" event={"ID":"ff694e86-4a85-4102-9aff-8c91a7bb9106","Type":"ContainerDied","Data":"9d53d74c76ef90d1be78b9d8eb35bfa96765c67318df2e7b3e506299d03c3cf6"} Sep 29 09:45:51 crc kubenswrapper[4779]: I0929 09:45:51.782258 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-c9jzr" event={"ID":"ff694e86-4a85-4102-9aff-8c91a7bb9106","Type":"ContainerStarted","Data":"b78fe72114f705759dd35586cb75d424b9f00cb5cf9ff1cb82783ce69db63083"} Sep 29 09:45:52 crc kubenswrapper[4779]: I0929 09:45:52.084570 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sd786" Sep 29 09:45:52 crc kubenswrapper[4779]: I0929 09:45:52.170377 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dd9r\" (UniqueName: \"kubernetes.io/projected/bb3e5b11-382a-4ff0-af29-11a3573ff188-kube-api-access-7dd9r\") pod \"bb3e5b11-382a-4ff0-af29-11a3573ff188\" (UID: \"bb3e5b11-382a-4ff0-af29-11a3573ff188\") " Sep 29 09:45:52 crc kubenswrapper[4779]: I0929 09:45:52.175409 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb3e5b11-382a-4ff0-af29-11a3573ff188-kube-api-access-7dd9r" (OuterVolumeSpecName: "kube-api-access-7dd9r") pod "bb3e5b11-382a-4ff0-af29-11a3573ff188" (UID: "bb3e5b11-382a-4ff0-af29-11a3573ff188"). InnerVolumeSpecName "kube-api-access-7dd9r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:52 crc kubenswrapper[4779]: I0929 09:45:52.272794 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dd9r\" (UniqueName: \"kubernetes.io/projected/bb3e5b11-382a-4ff0-af29-11a3573ff188-kube-api-access-7dd9r\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:52 crc kubenswrapper[4779]: I0929 09:45:52.789099 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sd786" Sep 29 09:45:52 crc kubenswrapper[4779]: I0929 09:45:52.789110 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sd786" event={"ID":"bb3e5b11-382a-4ff0-af29-11a3573ff188","Type":"ContainerDied","Data":"eb9acef9cff70ebae832c62bdfd549fb57deb12ac383e3bae5e0cd4ac7230ee6"} Sep 29 09:45:52 crc kubenswrapper[4779]: I0929 09:45:52.789152 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb9acef9cff70ebae832c62bdfd549fb57deb12ac383e3bae5e0cd4ac7230ee6" Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.136258 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-c9jzr" Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.190828 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q59zs\" (UniqueName: \"kubernetes.io/projected/ff694e86-4a85-4102-9aff-8c91a7bb9106-kube-api-access-q59zs\") pod \"ff694e86-4a85-4102-9aff-8c91a7bb9106\" (UID: \"ff694e86-4a85-4102-9aff-8c91a7bb9106\") " Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.196412 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff694e86-4a85-4102-9aff-8c91a7bb9106-kube-api-access-q59zs" (OuterVolumeSpecName: "kube-api-access-q59zs") pod "ff694e86-4a85-4102-9aff-8c91a7bb9106" (UID: "ff694e86-4a85-4102-9aff-8c91a7bb9106"). InnerVolumeSpecName "kube-api-access-q59zs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.292755 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q59zs\" (UniqueName: \"kubernetes.io/projected/ff694e86-4a85-4102-9aff-8c91a7bb9106-kube-api-access-q59zs\") on node \"crc\" DevicePath \"\"" Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.798789 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-c9jzr" event={"ID":"ff694e86-4a85-4102-9aff-8c91a7bb9106","Type":"ContainerDied","Data":"b78fe72114f705759dd35586cb75d424b9f00cb5cf9ff1cb82783ce69db63083"} Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.799059 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b78fe72114f705759dd35586cb75d424b9f00cb5cf9ff1cb82783ce69db63083" Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.798830 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-c9jzr" Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.803962 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerStarted","Data":"e21b42fa2675e97e76e6b35006c8c61e3f4f5224c2e2a02cc02e44a4178bbeff"} Sep 29 09:45:53 crc kubenswrapper[4779]: I0929 09:45:53.837091 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=9.833214088 podStartE2EDuration="43.837070514s" podCreationTimestamp="2025-09-29 09:45:10 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.664194135 +0000 UTC m=+950.645518039" lastFinishedPulling="2025-09-29 09:45:52.668050561 +0000 UTC m=+984.649374465" observedRunningTime="2025-09-29 09:45:53.833579454 +0000 UTC m=+985.814903358" watchObservedRunningTime="2025-09-29 09:45:53.837070514 +0000 UTC m=+985.818394418" Sep 29 09:45:55 crc kubenswrapper[4779]: I0929 09:45:55.370258 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 29 09:45:56 crc kubenswrapper[4779]: I0929 09:45:56.918684 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:56 crc kubenswrapper[4779]: I0929 09:45:56.919112 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:56 crc kubenswrapper[4779]: I0929 09:45:56.921111 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:57 crc kubenswrapper[4779]: I0929 09:45:57.835283 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.460143 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-q8jgk"] Sep 29 09:45:58 crc kubenswrapper[4779]: E0929 09:45:58.460492 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff694e86-4a85-4102-9aff-8c91a7bb9106" containerName="mariadb-database-create" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.460505 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff694e86-4a85-4102-9aff-8c91a7bb9106" containerName="mariadb-database-create" Sep 29 09:45:58 crc kubenswrapper[4779]: E0929 09:45:58.460515 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb3e5b11-382a-4ff0-af29-11a3573ff188" containerName="mariadb-database-create" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.460521 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb3e5b11-382a-4ff0-af29-11a3573ff188" containerName="mariadb-database-create" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.460671 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff694e86-4a85-4102-9aff-8c91a7bb9106" containerName="mariadb-database-create" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.460701 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb3e5b11-382a-4ff0-af29-11a3573ff188" containerName="mariadb-database-create" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.461275 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-q8jgk" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.473817 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-q8jgk"] Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.596942 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74bcj\" (UniqueName: \"kubernetes.io/projected/390b7729-1b5b-4581-98c4-decf700653e1-kube-api-access-74bcj\") pod \"keystone-db-create-q8jgk\" (UID: \"390b7729-1b5b-4581-98c4-decf700653e1\") " pod="openstack/keystone-db-create-q8jgk" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.699079 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74bcj\" (UniqueName: \"kubernetes.io/projected/390b7729-1b5b-4581-98c4-decf700653e1-kube-api-access-74bcj\") pod \"keystone-db-create-q8jgk\" (UID: \"390b7729-1b5b-4581-98c4-decf700653e1\") " pod="openstack/keystone-db-create-q8jgk" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.741819 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74bcj\" (UniqueName: \"kubernetes.io/projected/390b7729-1b5b-4581-98c4-decf700653e1-kube-api-access-74bcj\") pod \"keystone-db-create-q8jgk\" (UID: \"390b7729-1b5b-4581-98c4-decf700653e1\") " pod="openstack/keystone-db-create-q8jgk" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.784564 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-q8jgk" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.805527 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-sr2s6"] Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.806601 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-sr2s6" Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.824625 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-sr2s6"] Sep 29 09:45:58 crc kubenswrapper[4779]: I0929 09:45:58.901704 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nkkz\" (UniqueName: \"kubernetes.io/projected/7180a99e-b8b5-4c57-9bbe-221f6b83bb16-kube-api-access-6nkkz\") pod \"placement-db-create-sr2s6\" (UID: \"7180a99e-b8b5-4c57-9bbe-221f6b83bb16\") " pod="openstack/placement-db-create-sr2s6" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.003315 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nkkz\" (UniqueName: \"kubernetes.io/projected/7180a99e-b8b5-4c57-9bbe-221f6b83bb16-kube-api-access-6nkkz\") pod \"placement-db-create-sr2s6\" (UID: \"7180a99e-b8b5-4c57-9bbe-221f6b83bb16\") " pod="openstack/placement-db-create-sr2s6" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.032936 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nkkz\" (UniqueName: \"kubernetes.io/projected/7180a99e-b8b5-4c57-9bbe-221f6b83bb16-kube-api-access-6nkkz\") pod \"placement-db-create-sr2s6\" (UID: \"7180a99e-b8b5-4c57-9bbe-221f6b83bb16\") " pod="openstack/placement-db-create-sr2s6" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.112822 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-7fff-account-create-zgx74"] Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.118221 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7fff-account-create-zgx74" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.125238 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.162163 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7fff-account-create-zgx74"] Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.200301 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-sr2s6" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.208049 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd4sl\" (UniqueName: \"kubernetes.io/projected/b4c0f69c-d549-4d1d-b893-01aac180cf13-kube-api-access-sd4sl\") pod \"glance-7fff-account-create-zgx74\" (UID: \"b4c0f69c-d549-4d1d-b893-01aac180cf13\") " pod="openstack/glance-7fff-account-create-zgx74" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.284233 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-q8jgk"] Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.311341 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd4sl\" (UniqueName: \"kubernetes.io/projected/b4c0f69c-d549-4d1d-b893-01aac180cf13-kube-api-access-sd4sl\") pod \"glance-7fff-account-create-zgx74\" (UID: \"b4c0f69c-d549-4d1d-b893-01aac180cf13\") " pod="openstack/glance-7fff-account-create-zgx74" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.326695 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd4sl\" (UniqueName: \"kubernetes.io/projected/b4c0f69c-d549-4d1d-b893-01aac180cf13-kube-api-access-sd4sl\") pod \"glance-7fff-account-create-zgx74\" (UID: \"b4c0f69c-d549-4d1d-b893-01aac180cf13\") " pod="openstack/glance-7fff-account-create-zgx74" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.458073 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7fff-account-create-zgx74" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.629641 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-sr2s6"] Sep 29 09:45:59 crc kubenswrapper[4779]: W0929 09:45:59.652661 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7180a99e_b8b5_4c57_9bbe_221f6b83bb16.slice/crio-8cc825b0d5a80886e71b4eaa1c542fee3a4ae9e218e7fd73e6555f439192d4d8 WatchSource:0}: Error finding container 8cc825b0d5a80886e71b4eaa1c542fee3a4ae9e218e7fd73e6555f439192d4d8: Status 404 returned error can't find the container with id 8cc825b0d5a80886e71b4eaa1c542fee3a4ae9e218e7fd73e6555f439192d4d8 Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.858986 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sr2s6" event={"ID":"7180a99e-b8b5-4c57-9bbe-221f6b83bb16","Type":"ContainerStarted","Data":"71d9135b9439a259f8866b01cf297bd49759f4325aa6148e628979f2a5599da5"} Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.859232 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sr2s6" event={"ID":"7180a99e-b8b5-4c57-9bbe-221f6b83bb16","Type":"ContainerStarted","Data":"8cc825b0d5a80886e71b4eaa1c542fee3a4ae9e218e7fd73e6555f439192d4d8"} Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.860645 4779 generic.go:334] "Generic (PLEG): container finished" podID="390b7729-1b5b-4581-98c4-decf700653e1" containerID="21943e6406322efcbed715e36a162f36d904fbcb8596d313a68820341259471f" exitCode=0 Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.860687 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-q8jgk" event={"ID":"390b7729-1b5b-4581-98c4-decf700653e1","Type":"ContainerDied","Data":"21943e6406322efcbed715e36a162f36d904fbcb8596d313a68820341259471f"} Sep 
29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.860727 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-q8jgk" event={"ID":"390b7729-1b5b-4581-98c4-decf700653e1","Type":"ContainerStarted","Data":"4edf8dc3294defb5771ed920afb4f91232afc591ce89eae1b557eb052b04ff62"} Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.886478 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-sr2s6" podStartSLOduration=1.886458582 podStartE2EDuration="1.886458582s" podCreationTimestamp="2025-09-29 09:45:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:45:59.87319559 +0000 UTC m=+991.854519494" watchObservedRunningTime="2025-09-29 09:45:59.886458582 +0000 UTC m=+991.867782486" Sep 29 09:45:59 crc kubenswrapper[4779]: I0929 09:45:59.887455 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7fff-account-create-zgx74"] Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.384162 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.384407 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="prometheus" containerID="cri-o://983afb67983de941bc23e57c73e1b3978bc75d1b2cd81623560322c36843d4f5" gracePeriod=600 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.384509 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="config-reloader" containerID="cri-o://06724ab6700ef6e96d780244b0fac2c116335626451a7b19f3348e2e30349e9d" gracePeriod=600 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.384500 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="thanos-sidecar" containerID="cri-o://e21b42fa2675e97e76e6b35006c8c61e3f4f5224c2e2a02cc02e44a4178bbeff" gracePeriod=600 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.653382 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-0ed1-account-create-lmlvp"] Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.654549 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-0ed1-account-create-lmlvp" Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.656638 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.663506 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-0ed1-account-create-lmlvp"] Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.740845 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mkpw\" (UniqueName: \"kubernetes.io/projected/308b21fc-c8dc-4f88-a842-a5517b2bb2e9-kube-api-access-8mkpw\") pod \"watcher-0ed1-account-create-lmlvp\" (UID: \"308b21fc-c8dc-4f88-a842-a5517b2bb2e9\") " pod="openstack/watcher-0ed1-account-create-lmlvp" Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.842808 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mkpw\" (UniqueName: \"kubernetes.io/projected/308b21fc-c8dc-4f88-a842-a5517b2bb2e9-kube-api-access-8mkpw\") pod \"watcher-0ed1-account-create-lmlvp\" (UID: \"308b21fc-c8dc-4f88-a842-a5517b2bb2e9\") " pod="openstack/watcher-0ed1-account-create-lmlvp" Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.866529 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mkpw\" (UniqueName: \"kubernetes.io/projected/308b21fc-c8dc-4f88-a842-a5517b2bb2e9-kube-api-access-8mkpw\") pod \"watcher-0ed1-account-create-lmlvp\" (UID: \"308b21fc-c8dc-4f88-a842-a5517b2bb2e9\") " pod="openstack/watcher-0ed1-account-create-lmlvp" Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.873003 4779 generic.go:334] "Generic (PLEG): container finished" podID="daa6f578-ea44-4555-be0c-e2b8662386f0" containerID="331f0e0f0bd4e4a5bf2f8d1dce8432cf666be90233449ac92419a403e10a74a8" exitCode=0 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.873108 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"daa6f578-ea44-4555-be0c-e2b8662386f0","Type":"ContainerDied","Data":"331f0e0f0bd4e4a5bf2f8d1dce8432cf666be90233449ac92419a403e10a74a8"} Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.877702 4779 generic.go:334] "Generic (PLEG): container finished" podID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerID="e21b42fa2675e97e76e6b35006c8c61e3f4f5224c2e2a02cc02e44a4178bbeff" exitCode=0 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.877731 4779 generic.go:334] "Generic (PLEG): container finished" podID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerID="06724ab6700ef6e96d780244b0fac2c116335626451a7b19f3348e2e30349e9d" exitCode=0 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.877740 4779 generic.go:334] "Generic (PLEG): container finished" podID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerID="983afb67983de941bc23e57c73e1b3978bc75d1b2cd81623560322c36843d4f5" exitCode=0 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.877776 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerDied","Data":"e21b42fa2675e97e76e6b35006c8c61e3f4f5224c2e2a02cc02e44a4178bbeff"} Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.877819 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerDied","Data":"06724ab6700ef6e96d780244b0fac2c116335626451a7b19f3348e2e30349e9d"} Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.878062 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerDied","Data":"983afb67983de941bc23e57c73e1b3978bc75d1b2cd81623560322c36843d4f5"} Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.879308 4779 generic.go:334] "Generic (PLEG): container finished" podID="7180a99e-b8b5-4c57-9bbe-221f6b83bb16" containerID="71d9135b9439a259f8866b01cf297bd49759f4325aa6148e628979f2a5599da5" exitCode=0 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.879442 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sr2s6" event={"ID":"7180a99e-b8b5-4c57-9bbe-221f6b83bb16","Type":"ContainerDied","Data":"71d9135b9439a259f8866b01cf297bd49759f4325aa6148e628979f2a5599da5"} Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.882240 4779 generic.go:334] "Generic (PLEG): container finished" podID="b4c0f69c-d549-4d1d-b893-01aac180cf13" containerID="64093fcd75ba4993fc70e84f1f222e9b7df183fe5712749c18488cb3786cd5ea" exitCode=0 Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.882433 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7fff-account-create-zgx74" event={"ID":"b4c0f69c-d549-4d1d-b893-01aac180cf13","Type":"ContainerDied","Data":"64093fcd75ba4993fc70e84f1f222e9b7df183fe5712749c18488cb3786cd5ea"} Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.882476 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7fff-account-create-zgx74" event={"ID":"b4c0f69c-d549-4d1d-b893-01aac180cf13","Type":"ContainerStarted","Data":"5aee6319d37e06f88f7e1d4502cf33348c2515ae208de0fe50246b867442a6d6"} Sep 29 09:46:00 crc kubenswrapper[4779]: I0929 09:46:00.978424 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-0ed1-account-create-lmlvp" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.140759 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-q8jgk" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.250147 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74bcj\" (UniqueName: \"kubernetes.io/projected/390b7729-1b5b-4581-98c4-decf700653e1-kube-api-access-74bcj\") pod \"390b7729-1b5b-4581-98c4-decf700653e1\" (UID: \"390b7729-1b5b-4581-98c4-decf700653e1\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.255787 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/390b7729-1b5b-4581-98c4-decf700653e1-kube-api-access-74bcj" (OuterVolumeSpecName: "kube-api-access-74bcj") pod "390b7729-1b5b-4581-98c4-decf700653e1" (UID: "390b7729-1b5b-4581-98c4-decf700653e1"). InnerVolumeSpecName "kube-api-access-74bcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.329716 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.352330 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74bcj\" (UniqueName: \"kubernetes.io/projected/390b7729-1b5b-4581-98c4-decf700653e1-kube-api-access-74bcj\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.452851 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-thanos-prometheus-http-client-file\") pod \"b3627e10-4513-49fd-bdf5-0a83db9d8561\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.452941 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3627e10-4513-49fd-bdf5-0a83db9d8561-config-out\") pod \"b3627e10-4513-49fd-bdf5-0a83db9d8561\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.453027 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-tls-assets\") pod \"b3627e10-4513-49fd-bdf5-0a83db9d8561\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.453055 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-config\") pod \"b3627e10-4513-49fd-bdf5-0a83db9d8561\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.453096 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnkhx\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-kube-api-access-xnkhx\") pod \"b3627e10-4513-49fd-bdf5-0a83db9d8561\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.453133 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-web-config\") pod \"b3627e10-4513-49fd-bdf5-0a83db9d8561\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.453237 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"b3627e10-4513-49fd-bdf5-0a83db9d8561\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.453297 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3627e10-4513-49fd-bdf5-0a83db9d8561-prometheus-metric-storage-rulefiles-0\") pod \"b3627e10-4513-49fd-bdf5-0a83db9d8561\" (UID: \"b3627e10-4513-49fd-bdf5-0a83db9d8561\") " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.454083 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3627e10-4513-49fd-bdf5-0a83db9d8561-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: 
"prometheus-metric-storage-rulefiles-0") pod "b3627e10-4513-49fd-bdf5-0a83db9d8561" (UID: "b3627e10-4513-49fd-bdf5-0a83db9d8561"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.458289 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3627e10-4513-49fd-bdf5-0a83db9d8561-config-out" (OuterVolumeSpecName: "config-out") pod "b3627e10-4513-49fd-bdf5-0a83db9d8561" (UID: "b3627e10-4513-49fd-bdf5-0a83db9d8561"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.458397 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-config" (OuterVolumeSpecName: "config") pod "b3627e10-4513-49fd-bdf5-0a83db9d8561" (UID: "b3627e10-4513-49fd-bdf5-0a83db9d8561"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.458491 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-kube-api-access-xnkhx" (OuterVolumeSpecName: "kube-api-access-xnkhx") pod "b3627e10-4513-49fd-bdf5-0a83db9d8561" (UID: "b3627e10-4513-49fd-bdf5-0a83db9d8561"). InnerVolumeSpecName "kube-api-access-xnkhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.458936 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "b3627e10-4513-49fd-bdf5-0a83db9d8561" (UID: "b3627e10-4513-49fd-bdf5-0a83db9d8561"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.463086 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "b3627e10-4513-49fd-bdf5-0a83db9d8561" (UID: "b3627e10-4513-49fd-bdf5-0a83db9d8561"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.474046 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "b3627e10-4513-49fd-bdf5-0a83db9d8561" (UID: "b3627e10-4513-49fd-bdf5-0a83db9d8561"). InnerVolumeSpecName "pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.486727 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-web-config" (OuterVolumeSpecName: "web-config") pod "b3627e10-4513-49fd-bdf5-0a83db9d8561" (UID: "b3627e10-4513-49fd-bdf5-0a83db9d8561"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.497169 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-0ed1-account-create-lmlvp"] Sep 29 09:46:01 crc kubenswrapper[4779]: W0929 09:46:01.503297 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod308b21fc_c8dc_4f88_a842_a5517b2bb2e9.slice/crio-a2c6975228baebe40d506ac931376aa7166669d97ae9e0e945c411c1754f8a5d WatchSource:0}: Error finding container a2c6975228baebe40d506ac931376aa7166669d97ae9e0e945c411c1754f8a5d: Status 404 returned error can't find the container with id a2c6975228baebe40d506ac931376aa7166669d97ae9e0e945c411c1754f8a5d Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.555411 4779 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.555458 4779 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3627e10-4513-49fd-bdf5-0a83db9d8561-config-out\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.555473 4779 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-tls-assets\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.555484 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.555496 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnkhx\" (UniqueName: \"kubernetes.io/projected/b3627e10-4513-49fd-bdf5-0a83db9d8561-kube-api-access-xnkhx\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.555508 4779 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3627e10-4513-49fd-bdf5-0a83db9d8561-web-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.555553 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") on node \"crc\" " Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.555570 4779 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3627e10-4513-49fd-bdf5-0a83db9d8561-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.584477 4779 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.584638 4779 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8") on node "crc"
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.657091 4779 reconciler_common.go:293] "Volume detached for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.891873 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-q8jgk" event={"ID":"390b7729-1b5b-4581-98c4-decf700653e1","Type":"ContainerDied","Data":"4edf8dc3294defb5771ed920afb4f91232afc591ce89eae1b557eb052b04ff62"}
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.891945 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4edf8dc3294defb5771ed920afb4f91232afc591ce89eae1b557eb052b04ff62"
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.892684 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-q8jgk"
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.894242 4779 generic.go:334] "Generic (PLEG): container finished" podID="308b21fc-c8dc-4f88-a842-a5517b2bb2e9" containerID="0d9fa0303943299319ae8db9134c941057c1848ba3bef4d7fe0d2bc9caf937db" exitCode=0
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.894299 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-0ed1-account-create-lmlvp" event={"ID":"308b21fc-c8dc-4f88-a842-a5517b2bb2e9","Type":"ContainerDied","Data":"0d9fa0303943299319ae8db9134c941057c1848ba3bef4d7fe0d2bc9caf937db"}
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.894340 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-0ed1-account-create-lmlvp" event={"ID":"308b21fc-c8dc-4f88-a842-a5517b2bb2e9","Type":"ContainerStarted","Data":"a2c6975228baebe40d506ac931376aa7166669d97ae9e0e945c411c1754f8a5d"}
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.896612 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"daa6f578-ea44-4555-be0c-e2b8662386f0","Type":"ContainerStarted","Data":"2e8d4a8ee665725ebd5246e07f4fd32dd3575a9561fd2792d30ea3edb7de8d98"}
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.896861 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.899749 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b3627e10-4513-49fd-bdf5-0a83db9d8561","Type":"ContainerDied","Data":"e6ca92459233eba526c312174b0fda7da03a5bd1f1923132652448d46ae5dc09"}
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.899768 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.899814 4779 scope.go:117] "RemoveContainer" containerID="e21b42fa2675e97e76e6b35006c8c61e3f4f5224c2e2a02cc02e44a4178bbeff"
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.903653 4779 generic.go:334] "Generic (PLEG): container finished" podID="779f829e-6240-47a5-8d8d-9e279d316df7" containerID="02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff" exitCode=0
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.903707 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"779f829e-6240-47a5-8d8d-9e279d316df7","Type":"ContainerDied","Data":"02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff"}
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.905433 4779 generic.go:334] "Generic (PLEG): container finished" podID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerID="d06451a6c3888fe27bb17e40eff8efe95ff1aed4e982b6c94a8c786dad8579f6" exitCode=0
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.905822 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8faade2a-9a07-45b9-99e4-b448b64afaaa","Type":"ContainerDied","Data":"d06451a6c3888fe27bb17e40eff8efe95ff1aed4e982b6c94a8c786dad8579f6"}
Sep 29 09:46:01 crc kubenswrapper[4779]: I0929 09:46:01.985804 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-notifications-server-0" podStartSLOduration=48.510807055 podStartE2EDuration="57.985788383s" podCreationTimestamp="2025-09-29 09:45:04 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.185259709 +0000 UTC m=+950.166583613" lastFinishedPulling="2025-09-29 09:45:27.660241037 +0000 UTC m=+959.641564941" observedRunningTime="2025-09-29 09:46:01.978146742 +0000 UTC m=+993.959470666" watchObservedRunningTime="2025-09-29 09:46:01.985788383 +0000 UTC m=+993.967112287"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.069857 4779 scope.go:117] "RemoveContainer" containerID="06724ab6700ef6e96d780244b0fac2c116335626451a7b19f3348e2e30349e9d"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.102705 4779 scope.go:117] "RemoveContainer" containerID="983afb67983de941bc23e57c73e1b3978bc75d1b2cd81623560322c36843d4f5"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.138183 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.150245 4779 scope.go:117] "RemoveContainer" containerID="835a58cf4237846af5fe498031d03b152802d1e9a85697ab90dfb70b61219995"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.156410 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.161609 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 09:46:02 crc kubenswrapper[4779]: E0929 09:46:02.161944 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="thanos-sidecar"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.161959 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="thanos-sidecar"
Sep 29 09:46:02 crc kubenswrapper[4779]: E0929 09:46:02.161974 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="init-config-reloader"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.161982 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="init-config-reloader"
Sep 29 09:46:02 crc kubenswrapper[4779]: E0929 09:46:02.161994 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="prometheus"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.162000 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="prometheus"
Sep 29 09:46:02 crc kubenswrapper[4779]: E0929 09:46:02.162013 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="config-reloader"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.162019 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="config-reloader"
Sep 29 09:46:02 crc kubenswrapper[4779]: E0929 09:46:02.162031 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="390b7729-1b5b-4581-98c4-decf700653e1" containerName="mariadb-database-create"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.162036 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="390b7729-1b5b-4581-98c4-decf700653e1" containerName="mariadb-database-create"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.162179 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="prometheus"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.162203 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="thanos-sidecar"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.162215 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="390b7729-1b5b-4581-98c4-decf700653e1" containerName="mariadb-database-create"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.162234 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" containerName="config-reloader"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.163655 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.170691 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.170818 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.170888 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.171007 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.182489 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.184028 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.184937 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-zgl4h"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.195485 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272166 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272209 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272236 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1c5a9de7-c943-4654-bb1a-087fd1bb739e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272262 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272288 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272322 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwc8r\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-kube-api-access-mwc8r\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272349 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272414 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272439 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.272464 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.362216 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sr2s6"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.374500 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwc8r\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-kube-api-access-mwc8r\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.374809 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.374838 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.374885 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.374923 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.374952 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.374982 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.375002 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.375024 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1c5a9de7-c943-4654-bb1a-087fd1bb739e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.375049 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.375074 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.378987 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1c5a9de7-c943-4654-bb1a-087fd1bb739e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.383038 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.383451 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.383516 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.384472 4779 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.384496 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d7c28a7a22cfe960b99bbb5b934acd1f650db36f185879457a9343b648a1e5b0/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.384590 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.385508 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.385581 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.388111 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.392402 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.400150 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7fff-account-create-zgx74"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.402873 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwc8r\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-kube-api-access-mwc8r\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.434487 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.476165 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nkkz\" (UniqueName: \"kubernetes.io/projected/7180a99e-b8b5-4c57-9bbe-221f6b83bb16-kube-api-access-6nkkz\") pod \"7180a99e-b8b5-4c57-9bbe-221f6b83bb16\" (UID: \"7180a99e-b8b5-4c57-9bbe-221f6b83bb16\") "
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.476223 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd4sl\" (UniqueName: \"kubernetes.io/projected/b4c0f69c-d549-4d1d-b893-01aac180cf13-kube-api-access-sd4sl\") pod \"b4c0f69c-d549-4d1d-b893-01aac180cf13\" (UID: \"b4c0f69c-d549-4d1d-b893-01aac180cf13\") "
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.480022 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7180a99e-b8b5-4c57-9bbe-221f6b83bb16-kube-api-access-6nkkz" (OuterVolumeSpecName: "kube-api-access-6nkkz") pod "7180a99e-b8b5-4c57-9bbe-221f6b83bb16" (UID: "7180a99e-b8b5-4c57-9bbe-221f6b83bb16"). InnerVolumeSpecName "kube-api-access-6nkkz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.481151 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4c0f69c-d549-4d1d-b893-01aac180cf13-kube-api-access-sd4sl" (OuterVolumeSpecName: "kube-api-access-sd4sl") pod "b4c0f69c-d549-4d1d-b893-01aac180cf13" (UID: "b4c0f69c-d549-4d1d-b893-01aac180cf13"). InnerVolumeSpecName "kube-api-access-sd4sl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.548705 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.577743 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nkkz\" (UniqueName: \"kubernetes.io/projected/7180a99e-b8b5-4c57-9bbe-221f6b83bb16-kube-api-access-6nkkz\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.577780 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd4sl\" (UniqueName: \"kubernetes.io/projected/b4c0f69c-d549-4d1d-b893-01aac180cf13-kube-api-access-sd4sl\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.729235 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3627e10-4513-49fd-bdf5-0a83db9d8561" path="/var/lib/kubelet/pods/b3627e10-4513-49fd-bdf5-0a83db9d8561/volumes"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.914182 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7fff-account-create-zgx74" event={"ID":"b4c0f69c-d549-4d1d-b893-01aac180cf13","Type":"ContainerDied","Data":"5aee6319d37e06f88f7e1d4502cf33348c2515ae208de0fe50246b867442a6d6"}
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.914224 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5aee6319d37e06f88f7e1d4502cf33348c2515ae208de0fe50246b867442a6d6"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.914202 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7fff-account-create-zgx74"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.916866 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"779f829e-6240-47a5-8d8d-9e279d316df7","Type":"ContainerStarted","Data":"08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2"}
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.917575 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.919362 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8faade2a-9a07-45b9-99e4-b448b64afaaa","Type":"ContainerStarted","Data":"9b36ddd7a7d61bb24461a2467c281c67c711b0a2f731a9d0c87617f0e20b252b"}
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.919558 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.920986 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sr2s6"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.921330 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sr2s6" event={"ID":"7180a99e-b8b5-4c57-9bbe-221f6b83bb16","Type":"ContainerDied","Data":"8cc825b0d5a80886e71b4eaa1c542fee3a4ae9e218e7fd73e6555f439192d4d8"}
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.921352 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cc825b0d5a80886e71b4eaa1c542fee3a4ae9e218e7fd73e6555f439192d4d8"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.949155 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=48.174790215 podStartE2EDuration="58.949139533s" podCreationTimestamp="2025-09-29 09:45:04 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.386747621 +0000 UTC m=+950.368071525" lastFinishedPulling="2025-09-29 09:45:29.161096939 +0000 UTC m=+961.142420843" observedRunningTime="2025-09-29 09:46:02.943897852 +0000 UTC m=+994.925221756" watchObservedRunningTime="2025-09-29 09:46:02.949139533 +0000 UTC m=+994.930463437"
Sep 29 09:46:02 crc kubenswrapper[4779]: I0929 09:46:02.977966 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=48.798138038 podStartE2EDuration="59.977943694s" podCreationTimestamp="2025-09-29 09:45:03 +0000 UTC" firstStartedPulling="2025-09-29 09:45:18.082484418 +0000 UTC m=+950.063808322" lastFinishedPulling="2025-09-29 09:45:29.262290074 +0000 UTC m=+961.243613978" observedRunningTime="2025-09-29 09:46:02.974310299 +0000 UTC m=+994.955634213" watchObservedRunningTime="2025-09-29 09:46:02.977943694 +0000 UTC m=+994.959267598"
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.040226 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.274593 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-0ed1-account-create-lmlvp"
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.389217 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mkpw\" (UniqueName: \"kubernetes.io/projected/308b21fc-c8dc-4f88-a842-a5517b2bb2e9-kube-api-access-8mkpw\") pod \"308b21fc-c8dc-4f88-a842-a5517b2bb2e9\" (UID: \"308b21fc-c8dc-4f88-a842-a5517b2bb2e9\") "
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.392826 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308b21fc-c8dc-4f88-a842-a5517b2bb2e9-kube-api-access-8mkpw" (OuterVolumeSpecName: "kube-api-access-8mkpw") pod "308b21fc-c8dc-4f88-a842-a5517b2bb2e9" (UID: "308b21fc-c8dc-4f88-a842-a5517b2bb2e9"). InnerVolumeSpecName "kube-api-access-8mkpw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.490809 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mkpw\" (UniqueName: \"kubernetes.io/projected/308b21fc-c8dc-4f88-a842-a5517b2bb2e9-kube-api-access-8mkpw\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.930032 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-dzjsq" podUID="290bdf85-850a-4b79-85f7-dc2e662e0ae9" containerName="ovn-controller" probeResult="failure" output=<
Sep 29 09:46:03 crc kubenswrapper[4779]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Sep 29 09:46:03 crc kubenswrapper[4779]: >
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.931265 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerStarted","Data":"e37e8ea7604fb2c6fba0963aad6d9aa60627e04edf4d3b9d49bb6006bb3a1e26"}
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.932775 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-0ed1-account-create-lmlvp" event={"ID":"308b21fc-c8dc-4f88-a842-a5517b2bb2e9","Type":"ContainerDied","Data":"a2c6975228baebe40d506ac931376aa7166669d97ae9e0e945c411c1754f8a5d"}
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.932817 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2c6975228baebe40d506ac931376aa7166669d97ae9e0e945c411c1754f8a5d"
Sep 29 09:46:03 crc kubenswrapper[4779]: I0929 09:46:03.932798 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-0ed1-account-create-lmlvp"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.349710 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-dnnxk"]
Sep 29 09:46:04 crc kubenswrapper[4779]: E0929 09:46:04.350193 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7180a99e-b8b5-4c57-9bbe-221f6b83bb16" containerName="mariadb-database-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.350218 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7180a99e-b8b5-4c57-9bbe-221f6b83bb16" containerName="mariadb-database-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: E0929 09:46:04.350235 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="308b21fc-c8dc-4f88-a842-a5517b2bb2e9" containerName="mariadb-account-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.350243 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="308b21fc-c8dc-4f88-a842-a5517b2bb2e9" containerName="mariadb-account-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: E0929 09:46:04.350261 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4c0f69c-d549-4d1d-b893-01aac180cf13" containerName="mariadb-account-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.350269 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4c0f69c-d549-4d1d-b893-01aac180cf13" containerName="mariadb-account-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.350447 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7180a99e-b8b5-4c57-9bbe-221f6b83bb16" containerName="mariadb-database-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.350492 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="308b21fc-c8dc-4f88-a842-a5517b2bb2e9" containerName="mariadb-account-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.350503 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4c0f69c-d549-4d1d-b893-01aac180cf13" containerName="mariadb-account-create"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.351164 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.353208 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rkfkk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.361518 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-dnnxk"]
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.362036 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.405884 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-config-data\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.405963 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-db-sync-config-data\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.406066 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vkf2\" (UniqueName: \"kubernetes.io/projected/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-kube-api-access-8vkf2\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.406098 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-combined-ca-bundle\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.507635 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-db-sync-config-data\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.507782 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vkf2\" (UniqueName: \"kubernetes.io/projected/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-kube-api-access-8vkf2\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.507811 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-combined-ca-bundle\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.507863 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-config-data\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.522732 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-config-data\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.522757 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-combined-ca-bundle\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.525360 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-db-sync-config-data\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.526674 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vkf2\" (UniqueName: \"kubernetes.io/projected/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-kube-api-access-8vkf2\") pod \"glance-db-sync-dnnxk\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:04 crc kubenswrapper[4779]: I0929 09:46:04.674564 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dnnxk"
Sep 29 09:46:05 crc kubenswrapper[4779]: I0929 09:46:05.284197 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-dnnxk"]
Sep 29 09:46:05 crc kubenswrapper[4779]: W0929 09:46:05.288046 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb16b7fa7_e1c5_4368_9f6b_cc5a7526671f.slice/crio-493338a2ae3c6b5b2f64513c6b9edfeaf4841183ef551f7399e9f98573c92ead WatchSource:0}: Error finding container 493338a2ae3c6b5b2f64513c6b9edfeaf4841183ef551f7399e9f98573c92ead: Status 404 returned error can't find the container with id 493338a2ae3c6b5b2f64513c6b9edfeaf4841183ef551f7399e9f98573c92ead
Sep 29 09:46:05 crc kubenswrapper[4779]: I0929 09:46:05.956192 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dnnxk" event={"ID":"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f","Type":"ContainerStarted","Data":"493338a2ae3c6b5b2f64513c6b9edfeaf4841183ef551f7399e9f98573c92ead"}
Sep 29 09:46:05 crc kubenswrapper[4779]: I0929 09:46:05.958361 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerStarted","Data":"125698833ede566b41210de4b766e59874ee11f0b7ab1025b0525519921f098f"}
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.609293 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9c17-account-create-77v8l"]
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.610637 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c17-account-create-77v8l"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.612945 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.619080 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9c17-account-create-77v8l"]
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.674561 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p47kd\" (UniqueName: \"kubernetes.io/projected/8fdebe28-420d-4e9c-bc14-cd59fcb284bd-kube-api-access-p47kd\") pod \"keystone-9c17-account-create-77v8l\" (UID: \"8fdebe28-420d-4e9c-bc14-cd59fcb284bd\") " pod="openstack/keystone-9c17-account-create-77v8l"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.776597 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p47kd\" (UniqueName: \"kubernetes.io/projected/8fdebe28-420d-4e9c-bc14-cd59fcb284bd-kube-api-access-p47kd\") pod \"keystone-9c17-account-create-77v8l\" (UID: \"8fdebe28-420d-4e9c-bc14-cd59fcb284bd\") " pod="openstack/keystone-9c17-account-create-77v8l"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.799709 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p47kd\" (UniqueName: \"kubernetes.io/projected/8fdebe28-420d-4e9c-bc14-cd59fcb284bd-kube-api-access-p47kd\") pod \"keystone-9c17-account-create-77v8l\" (UID: \"8fdebe28-420d-4e9c-bc14-cd59fcb284bd\") " pod="openstack/keystone-9c17-account-create-77v8l"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.905412 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5200-account-create-lfnrs"]
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.906529 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5200-account-create-lfnrs"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.908460 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.917844 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5200-account-create-lfnrs"]
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.936346 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c17-account-create-77v8l"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.973168 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bbznl"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.974165 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bbznl"
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.975312 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-dzjsq" podUID="290bdf85-850a-4b79-85f7-dc2e662e0ae9" containerName="ovn-controller" probeResult="failure" output=<
Sep 29 09:46:08 crc kubenswrapper[4779]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Sep 29 09:46:08 crc kubenswrapper[4779]: >
Sep 29 09:46:08 crc kubenswrapper[4779]: I0929 09:46:08.979660 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj46x\" (UniqueName: \"kubernetes.io/projected/2d815e2e-6933-4792-97ef-b22c5d1df8d0-kube-api-access-tj46x\") pod \"placement-5200-account-create-lfnrs\" (UID: \"2d815e2e-6933-4792-97ef-b22c5d1df8d0\") " pod="openstack/placement-5200-account-create-lfnrs"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.082183 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj46x\" (UniqueName: \"kubernetes.io/projected/2d815e2e-6933-4792-97ef-b22c5d1df8d0-kube-api-access-tj46x\") pod \"placement-5200-account-create-lfnrs\" (UID: \"2d815e2e-6933-4792-97ef-b22c5d1df8d0\") " pod="openstack/placement-5200-account-create-lfnrs"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.111418 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj46x\" (UniqueName: \"kubernetes.io/projected/2d815e2e-6933-4792-97ef-b22c5d1df8d0-kube-api-access-tj46x\") pod \"placement-5200-account-create-lfnrs\" (UID: \"2d815e2e-6933-4792-97ef-b22c5d1df8d0\") " pod="openstack/placement-5200-account-create-lfnrs"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.212131 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-dzjsq-config-gx6js"]
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.220858 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.223365 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.227039 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5200-account-create-lfnrs"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.230436 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-dzjsq-config-gx6js"]
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.293072 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.395434 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.395493 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-scripts\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.395510 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run-ovn\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.395565 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-additional-scripts\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.395597 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmklm\" (UniqueName: \"kubernetes.io/projected/bcc1ddb2-0140-4169-b209-ebf6d5833599-kube-api-access-hmklm\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.395629 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-log-ovn\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.396008 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.431633 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9c17-account-create-77v8l"]
Sep 29 09:46:09 crc kubenswrapper[4779]: W0929 09:46:09.459895 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fdebe28_420d_4e9c_bc14_cd59fcb284bd.slice/crio-c9837de769f4f93ebf534d7abf6bc6e1d98df01375acdc4ceb2f9bd81fcccaf2 WatchSource:0}: Error finding container c9837de769f4f93ebf534d7abf6bc6e1d98df01375acdc4ceb2f9bd81fcccaf2: Status 404 returned error can't find the container with id c9837de769f4f93ebf534d7abf6bc6e1d98df01375acdc4ceb2f9bd81fcccaf2
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.496826 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-scripts\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.496859 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run-ovn\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.496930 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-additional-scripts\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.496967 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmklm\" (UniqueName: \"kubernetes.io/projected/bcc1ddb2-0140-4169-b209-ebf6d5833599-kube-api-access-hmklm\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.497005 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-log-ovn\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.497459 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-log-ovn\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.497481 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run-ovn\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.498316 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-additional-scripts\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.499485 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-scripts\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.513252 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmklm\" (UniqueName: \"kubernetes.io/projected/bcc1ddb2-0140-4169-b209-ebf6d5833599-kube-api-access-hmklm\") pod \"ovn-controller-dzjsq-config-gx6js\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") " pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.543890 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.716719 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5200-account-create-lfnrs"]
Sep 29 09:46:09 crc kubenswrapper[4779]: W0929 09:46:09.725510 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d815e2e_6933_4792_97ef_b22c5d1df8d0.slice/crio-15670ea1dc9566361ee7a5797c3daaec1143f1f259f8fb30513ff35ce0314402 WatchSource:0}: Error finding container 15670ea1dc9566361ee7a5797c3daaec1143f1f259f8fb30513ff35ce0314402: Status 404 returned error can't find the container with id 15670ea1dc9566361ee7a5797c3daaec1143f1f259f8fb30513ff35ce0314402
Sep 29 09:46:09 crc kubenswrapper[4779]: I0929 09:46:09.976317 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-dzjsq-config-gx6js"]
Sep 29 09:46:10 crc kubenswrapper[4779]: I0929 09:46:10.031898 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-dzjsq-config-gx6js" event={"ID":"bcc1ddb2-0140-4169-b209-ebf6d5833599","Type":"ContainerStarted","Data":"db5ff2ac213ec6b48c943568c21fc1ab327f440d0b0d1979603ab085dbb71fd5"}
Sep 29 09:46:10 crc kubenswrapper[4779]: I0929 09:46:10.034492 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c17-account-create-77v8l" event={"ID":"8fdebe28-420d-4e9c-bc14-cd59fcb284bd","Type":"ContainerStarted","Data":"651783ca245d77f0afa2e410b9981583f485aea154b5dcfde8e0dab42f54b11d"}
Sep 29 09:46:10 crc kubenswrapper[4779]: I0929 09:46:10.034601 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c17-account-create-77v8l" event={"ID":"8fdebe28-420d-4e9c-bc14-cd59fcb284bd","Type":"ContainerStarted","Data":"c9837de769f4f93ebf534d7abf6bc6e1d98df01375acdc4ceb2f9bd81fcccaf2"}
Sep 29 09:46:10 crc kubenswrapper[4779]: I0929 09:46:10.039483 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5200-account-create-lfnrs" event={"ID":"2d815e2e-6933-4792-97ef-b22c5d1df8d0","Type":"ContainerStarted","Data":"927a1a0110dd181766e0448083391941f565579331f56c0859febfdf38e68f4b"}
Sep 29 09:46:10 crc kubenswrapper[4779]: I0929 09:46:10.039521 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5200-account-create-lfnrs" event={"ID":"2d815e2e-6933-4792-97ef-b22c5d1df8d0","Type":"ContainerStarted","Data":"15670ea1dc9566361ee7a5797c3daaec1143f1f259f8fb30513ff35ce0314402"}
Sep 29 09:46:10 crc kubenswrapper[4779]: I0929 09:46:10.050933 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-9c17-account-create-77v8l" podStartSLOduration=2.05091603 podStartE2EDuration="2.05091603s" podCreationTimestamp="2025-09-29 09:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:46:10.045896185 +0000 UTC m=+1002.027220089" watchObservedRunningTime="2025-09-29 09:46:10.05091603 +0000 UTC m=+1002.032239924"
Sep 29 09:46:10 crc kubenswrapper[4779]: I0929 09:46:10.071664 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5200-account-create-lfnrs" podStartSLOduration=2.071646598 podStartE2EDuration="2.071646598s" podCreationTimestamp="2025-09-29 09:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:46:10.061584898 +0000 UTC m=+1002.042908812" watchObservedRunningTime="2025-09-29 09:46:10.071646598 +0000 UTC m=+1002.052970502"
Sep 29 09:46:11 crc kubenswrapper[4779]: I0929 09:46:11.055215 4779 generic.go:334] "Generic (PLEG): container finished" podID="8fdebe28-420d-4e9c-bc14-cd59fcb284bd" containerID="651783ca245d77f0afa2e410b9981583f485aea154b5dcfde8e0dab42f54b11d" exitCode=0
Sep 29 09:46:11 crc kubenswrapper[4779]: I0929 09:46:11.055318 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c17-account-create-77v8l" event={"ID":"8fdebe28-420d-4e9c-bc14-cd59fcb284bd","Type":"ContainerDied","Data":"651783ca245d77f0afa2e410b9981583f485aea154b5dcfde8e0dab42f54b11d"}
Sep 29 09:46:11 crc kubenswrapper[4779]: I0929 09:46:11.057449 4779 generic.go:334] "Generic (PLEG): container finished" podID="2d815e2e-6933-4792-97ef-b22c5d1df8d0" containerID="927a1a0110dd181766e0448083391941f565579331f56c0859febfdf38e68f4b" exitCode=0
Sep 29 09:46:11 crc kubenswrapper[4779]: I0929 09:46:11.057512 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5200-account-create-lfnrs" event={"ID":"2d815e2e-6933-4792-97ef-b22c5d1df8d0","Type":"ContainerDied","Data":"927a1a0110dd181766e0448083391941f565579331f56c0859febfdf38e68f4b"}
Sep 29 09:46:11 crc kubenswrapper[4779]: I0929 09:46:11.059405 4779 generic.go:334] "Generic (PLEG): container finished" podID="bcc1ddb2-0140-4169-b209-ebf6d5833599" containerID="c75f48b46418075f1c9f40d4f2db87fc6d4fdedefab4e3c3ac3031e7acdf461b" exitCode=0
Sep 29 09:46:11 crc kubenswrapper[4779]: I0929 09:46:11.059439 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-dzjsq-config-gx6js" event={"ID":"bcc1ddb2-0140-4169-b209-ebf6d5833599","Type":"ContainerDied","Data":"c75f48b46418075f1c9f40d4f2db87fc6d4fdedefab4e3c3ac3031e7acdf461b"}
Sep 29 09:46:13 crc kubenswrapper[4779]: I0929 09:46:13.076881 4779 generic.go:334] "Generic (PLEG): container finished" podID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerID="125698833ede566b41210de4b766e59874ee11f0b7ab1025b0525519921f098f" exitCode=0
Sep 29 09:46:13 crc kubenswrapper[4779]: I0929 09:46:13.076982 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerDied","Data":"125698833ede566b41210de4b766e59874ee11f0b7ab1025b0525519921f098f"}
Sep 29 09:46:13 crc kubenswrapper[4779]: I0929 09:46:13.939113 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-dzjsq"
Sep 29 09:46:15 crc kubenswrapper[4779]: I0929 09:46:15.093227 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused"
Sep 29 09:46:15 crc kubenswrapper[4779]: I0929 09:46:15.382549 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="779f829e-6240-47a5-8d8d-9e279d316df7" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused"
Sep 29 09:46:15 crc kubenswrapper[4779]: I0929 09:46:15.664285 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-notifications-server-0"
Sep 29 09:46:16 crc kubenswrapper[4779]: I0929 09:46:16.966252 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:46:16 crc kubenswrapper[4779]: I0929 09:46:16.966321 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.743623 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c17-account-create-77v8l"
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.758469 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-dzjsq-config-gx6js"
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.779530 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5200-account-create-lfnrs"
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.833291 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p47kd\" (UniqueName: \"kubernetes.io/projected/8fdebe28-420d-4e9c-bc14-cd59fcb284bd-kube-api-access-p47kd\") pod \"8fdebe28-420d-4e9c-bc14-cd59fcb284bd\" (UID: \"8fdebe28-420d-4e9c-bc14-cd59fcb284bd\") "
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.838159 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fdebe28-420d-4e9c-bc14-cd59fcb284bd-kube-api-access-p47kd" (OuterVolumeSpecName: "kube-api-access-p47kd") pod "8fdebe28-420d-4e9c-bc14-cd59fcb284bd" (UID: "8fdebe28-420d-4e9c-bc14-cd59fcb284bd"). InnerVolumeSpecName "kube-api-access-p47kd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.936874 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run-ovn\") pod \"bcc1ddb2-0140-4169-b209-ebf6d5833599\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") "
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937025 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmklm\" (UniqueName: \"kubernetes.io/projected/bcc1ddb2-0140-4169-b209-ebf6d5833599-kube-api-access-hmklm\") pod \"bcc1ddb2-0140-4169-b209-ebf6d5833599\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") "
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937072 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-additional-scripts\") pod \"bcc1ddb2-0140-4169-b209-ebf6d5833599\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") "
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937123 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tj46x\" (UniqueName: \"kubernetes.io/projected/2d815e2e-6933-4792-97ef-b22c5d1df8d0-kube-api-access-tj46x\") pod \"2d815e2e-6933-4792-97ef-b22c5d1df8d0\" (UID: \"2d815e2e-6933-4792-97ef-b22c5d1df8d0\") "
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937167 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-log-ovn\") pod \"bcc1ddb2-0140-4169-b209-ebf6d5833599\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") "
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937203 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-scripts\") pod \"bcc1ddb2-0140-4169-b209-ebf6d5833599\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") "
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937244 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run\") pod \"bcc1ddb2-0140-4169-b209-ebf6d5833599\" (UID: \"bcc1ddb2-0140-4169-b209-ebf6d5833599\") "
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937640 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p47kd\" (UniqueName: \"kubernetes.io/projected/8fdebe28-420d-4e9c-bc14-cd59fcb284bd-kube-api-access-p47kd\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937694 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run" (OuterVolumeSpecName: "var-run") pod "bcc1ddb2-0140-4169-b209-ebf6d5833599" (UID: "bcc1ddb2-0140-4169-b209-ebf6d5833599"). InnerVolumeSpecName "var-run".
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.937727 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "bcc1ddb2-0140-4169-b209-ebf6d5833599" (UID: "bcc1ddb2-0140-4169-b209-ebf6d5833599"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.938797 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "bcc1ddb2-0140-4169-b209-ebf6d5833599" (UID: "bcc1ddb2-0140-4169-b209-ebf6d5833599"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.938836 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "bcc1ddb2-0140-4169-b209-ebf6d5833599" (UID: "bcc1ddb2-0140-4169-b209-ebf6d5833599"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.940123 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-scripts" (OuterVolumeSpecName: "scripts") pod "bcc1ddb2-0140-4169-b209-ebf6d5833599" (UID: "bcc1ddb2-0140-4169-b209-ebf6d5833599"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.940415 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d815e2e-6933-4792-97ef-b22c5d1df8d0-kube-api-access-tj46x" (OuterVolumeSpecName: "kube-api-access-tj46x") pod "2d815e2e-6933-4792-97ef-b22c5d1df8d0" (UID: "2d815e2e-6933-4792-97ef-b22c5d1df8d0"). InnerVolumeSpecName "kube-api-access-tj46x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:17 crc kubenswrapper[4779]: I0929 09:46:17.941625 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcc1ddb2-0140-4169-b209-ebf6d5833599-kube-api-access-hmklm" (OuterVolumeSpecName: "kube-api-access-hmklm") pod "bcc1ddb2-0140-4169-b209-ebf6d5833599" (UID: "bcc1ddb2-0140-4169-b209-ebf6d5833599"). InnerVolumeSpecName "kube-api-access-hmklm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.038710 4779 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.039411 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmklm\" (UniqueName: \"kubernetes.io/projected/bcc1ddb2-0140-4169-b209-ebf6d5833599-kube-api-access-hmklm\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.039482 4779 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.039545 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tj46x\" (UniqueName: \"kubernetes.io/projected/2d815e2e-6933-4792-97ef-b22c5d1df8d0-kube-api-access-tj46x\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.039609 4779 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.039673 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bcc1ddb2-0140-4169-b209-ebf6d5833599-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.039733 4779 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bcc1ddb2-0140-4169-b209-ebf6d5833599-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.119109 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5200-account-create-lfnrs" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.119110 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5200-account-create-lfnrs" event={"ID":"2d815e2e-6933-4792-97ef-b22c5d1df8d0","Type":"ContainerDied","Data":"15670ea1dc9566361ee7a5797c3daaec1143f1f259f8fb30513ff35ce0314402"} Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.119247 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15670ea1dc9566361ee7a5797c3daaec1143f1f259f8fb30513ff35ce0314402" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.122235 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-dzjsq-config-gx6js" event={"ID":"bcc1ddb2-0140-4169-b209-ebf6d5833599","Type":"ContainerDied","Data":"db5ff2ac213ec6b48c943568c21fc1ab327f440d0b0d1979603ab085dbb71fd5"} Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.122354 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db5ff2ac213ec6b48c943568c21fc1ab327f440d0b0d1979603ab085dbb71fd5" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.122580 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-dzjsq-config-gx6js" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.130350 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9c17-account-create-77v8l" event={"ID":"8fdebe28-420d-4e9c-bc14-cd59fcb284bd","Type":"ContainerDied","Data":"c9837de769f4f93ebf534d7abf6bc6e1d98df01375acdc4ceb2f9bd81fcccaf2"} Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.130398 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9837de769f4f93ebf534d7abf6bc6e1d98df01375acdc4ceb2f9bd81fcccaf2" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.130468 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9c17-account-create-77v8l" Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.135077 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerStarted","Data":"4691ae1aafdb1775cb7f738fa4e5919e9ceaef81d7802b5bb89fcb24219d95ed"} Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.870436 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-dzjsq-config-gx6js"] Sep 29 09:46:18 crc kubenswrapper[4779]: I0929 09:46:18.877036 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-dzjsq-config-gx6js"] Sep 29 09:46:19 crc kubenswrapper[4779]: I0929 09:46:19.152290 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dnnxk" event={"ID":"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f","Type":"ContainerStarted","Data":"6908d8bacc0ddb133fedc2bdef6d4b7d272243d33be84c58815b6cbd7259b19c"} Sep 29 09:46:20 crc kubenswrapper[4779]: I0929 09:46:20.723555 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcc1ddb2-0140-4169-b209-ebf6d5833599" path="/var/lib/kubelet/pods/bcc1ddb2-0140-4169-b209-ebf6d5833599/volumes" Sep 29 09:46:21 crc kubenswrapper[4779]: I0929 09:46:21.168482 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerStarted","Data":"8e37cf67ecbe12050d8a7f3b49c88b07c480b057907ea7f37d1d6203cf26cb8a"} Sep 29 09:46:21 crc kubenswrapper[4779]: I0929 09:46:21.168522 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerStarted","Data":"d22c0e2ba105ddfc6f8935ba0c2d9739c12bb82a4acecb9a5fa0c89a9720d2c7"} Sep 29 09:46:21 crc kubenswrapper[4779]: I0929 09:46:21.198442 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=19.198423934 podStartE2EDuration="19.198423934s" podCreationTimestamp="2025-09-29 09:46:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:46:21.194709827 +0000 UTC m=+1013.176033751" watchObservedRunningTime="2025-09-29 09:46:21.198423934 +0000 UTC m=+1013.179747838" Sep 29 09:46:21 crc kubenswrapper[4779]: I0929 09:46:21.205476 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-dnnxk" podStartSLOduration=4.678030205 podStartE2EDuration="17.205452846s" podCreationTimestamp="2025-09-29 09:46:04 +0000 UTC" firstStartedPulling="2025-09-29 
09:46:05.289965029 +0000 UTC m=+997.271288933" lastFinishedPulling="2025-09-29 09:46:17.81738767 +0000 UTC m=+1009.798711574" observedRunningTime="2025-09-29 09:46:19.175336483 +0000 UTC m=+1011.156660387" watchObservedRunningTime="2025-09-29 09:46:21.205452846 +0000 UTC m=+1013.186776750" Sep 29 09:46:22 crc kubenswrapper[4779]: I0929 09:46:22.549698 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.093103 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.383068 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.414558 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-c9wj9"] Sep 29 09:46:25 crc kubenswrapper[4779]: E0929 09:46:25.415022 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d815e2e-6933-4792-97ef-b22c5d1df8d0" containerName="mariadb-account-create" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.415047 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d815e2e-6933-4792-97ef-b22c5d1df8d0" containerName="mariadb-account-create" Sep 29 09:46:25 crc kubenswrapper[4779]: E0929 09:46:25.415066 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fdebe28-420d-4e9c-bc14-cd59fcb284bd" containerName="mariadb-account-create" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.415074 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fdebe28-420d-4e9c-bc14-cd59fcb284bd" containerName="mariadb-account-create" Sep 29 09:46:25 crc kubenswrapper[4779]: E0929 09:46:25.415090 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc1ddb2-0140-4169-b209-ebf6d5833599" containerName="ovn-config" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.415097 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc1ddb2-0140-4169-b209-ebf6d5833599" containerName="ovn-config" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.415274 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d815e2e-6933-4792-97ef-b22c5d1df8d0" containerName="mariadb-account-create" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.415288 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fdebe28-420d-4e9c-bc14-cd59fcb284bd" containerName="mariadb-account-create" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.415307 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc1ddb2-0140-4169-b209-ebf6d5833599" containerName="ovn-config" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.415978 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-c9wj9" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.425644 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-c9wj9"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.525018 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-g7g8x"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.526360 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-g7g8x" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.536681 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-g7g8x"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.582462 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcpps\" (UniqueName: \"kubernetes.io/projected/1c3d7dc4-b829-4340-abb5-449b3dcd606f-kube-api-access-tcpps\") pod \"cinder-db-create-c9wj9\" (UID: \"1c3d7dc4-b829-4340-abb5-449b3dcd606f\") " pod="openstack/cinder-db-create-c9wj9" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.627794 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-lg2fm"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.628820 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.630735 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.630962 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-264xg" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.638164 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-lg2fm"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.684031 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcpps\" (UniqueName: \"kubernetes.io/projected/1c3d7dc4-b829-4340-abb5-449b3dcd606f-kube-api-access-tcpps\") pod \"cinder-db-create-c9wj9\" (UID: \"1c3d7dc4-b829-4340-abb5-449b3dcd606f\") " pod="openstack/cinder-db-create-c9wj9" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.684192 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jvsk\" (UniqueName: \"kubernetes.io/projected/9775652b-71da-489b-9105-d795a7feb2cb-kube-api-access-7jvsk\") pod \"barbican-db-create-g7g8x\" (UID: \"9775652b-71da-489b-9105-d795a7feb2cb\") " pod="openstack/barbican-db-create-g7g8x" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.691102 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4jkl4"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.692143 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.695846 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.696032 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.696153 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n7qtr" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.696653 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.714081 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4jkl4"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.717880 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcpps\" (UniqueName: \"kubernetes.io/projected/1c3d7dc4-b829-4340-abb5-449b3dcd606f-kube-api-access-tcpps\") pod \"cinder-db-create-c9wj9\" (UID: \"1c3d7dc4-b829-4340-abb5-449b3dcd606f\") " pod="openstack/cinder-db-create-c9wj9" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.733503 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-c9wj9" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.785822 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx2xb\" (UniqueName: \"kubernetes.io/projected/6eead9b6-ef70-4326-be2e-07fc0e22c444-kube-api-access-mx2xb\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.786244 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-combined-ca-bundle\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.786279 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-config-data\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.786309 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-config-data\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.786376 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jvsk\" (UniqueName: \"kubernetes.io/projected/9775652b-71da-489b-9105-d795a7feb2cb-kube-api-access-7jvsk\") pod \"barbican-db-create-g7g8x\" (UID: \"9775652b-71da-489b-9105-d795a7feb2cb\") " pod="openstack/barbican-db-create-g7g8x" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.786404 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlvqz\" (UniqueName: \"kubernetes.io/projected/fbef3a83-4b34-466e-895f-1005a824efc0-kube-api-access-vlvqz\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.786455 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-combined-ca-bundle\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.786540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-db-sync-config-data\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.805850 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jvsk\" (UniqueName: \"kubernetes.io/projected/9775652b-71da-489b-9105-d795a7feb2cb-kube-api-access-7jvsk\") pod \"barbican-db-create-g7g8x\" (UID: \"9775652b-71da-489b-9105-d795a7feb2cb\") " pod="openstack/barbican-db-create-g7g8x" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.807293 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-g94ck"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.810216 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-g94ck" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.821598 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-g94ck"] Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.839288 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-g7g8x" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.887730 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-config-data\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.887774 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-config-data\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.887814 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlvqz\" (UniqueName: \"kubernetes.io/projected/fbef3a83-4b34-466e-895f-1005a824efc0-kube-api-access-vlvqz\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.887847 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-combined-ca-bundle\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.887925 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr4n6\" (UniqueName: \"kubernetes.io/projected/2bfe06c4-af74-448c-b10b-d2f7f62fc96a-kube-api-access-sr4n6\") pod \"neutron-db-create-g94ck\" (UID: \"2bfe06c4-af74-448c-b10b-d2f7f62fc96a\") " pod="openstack/neutron-db-create-g94ck" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.887950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-db-sync-config-data\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.887967 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx2xb\" (UniqueName: \"kubernetes.io/projected/6eead9b6-ef70-4326-be2e-07fc0e22c444-kube-api-access-mx2xb\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.888014 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-combined-ca-bundle\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.898866 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-config-data\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 
09:46:25.898876 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-db-sync-config-data\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.902514 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-config-data\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.909757 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-combined-ca-bundle\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.912187 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlvqz\" (UniqueName: \"kubernetes.io/projected/fbef3a83-4b34-466e-895f-1005a824efc0-kube-api-access-vlvqz\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.913830 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-combined-ca-bundle\") pod \"watcher-db-sync-lg2fm\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.918114 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx2xb\" (UniqueName: \"kubernetes.io/projected/6eead9b6-ef70-4326-be2e-07fc0e22c444-kube-api-access-mx2xb\") pod \"keystone-db-sync-4jkl4\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.945620 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:25 crc kubenswrapper[4779]: I0929 09:46:25.997912 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr4n6\" (UniqueName: \"kubernetes.io/projected/2bfe06c4-af74-448c-b10b-d2f7f62fc96a-kube-api-access-sr4n6\") pod \"neutron-db-create-g94ck\" (UID: \"2bfe06c4-af74-448c-b10b-d2f7f62fc96a\") " pod="openstack/neutron-db-create-g94ck" Sep 29 09:46:26 crc kubenswrapper[4779]: I0929 09:46:26.009972 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:26 crc kubenswrapper[4779]: I0929 09:46:26.020762 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr4n6\" (UniqueName: \"kubernetes.io/projected/2bfe06c4-af74-448c-b10b-d2f7f62fc96a-kube-api-access-sr4n6\") pod \"neutron-db-create-g94ck\" (UID: \"2bfe06c4-af74-448c-b10b-d2f7f62fc96a\") " pod="openstack/neutron-db-create-g94ck" Sep 29 09:46:26 crc kubenswrapper[4779]: I0929 09:46:26.187197 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-g94ck" Sep 29 09:46:26 crc kubenswrapper[4779]: I0929 09:46:26.261760 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-c9wj9"] Sep 29 09:46:26 crc kubenswrapper[4779]: I0929 09:46:26.427653 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-g7g8x"] Sep 29 09:46:26 crc kubenswrapper[4779]: I0929 09:46:26.551507 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-lg2fm"] Sep 29 09:46:26 crc kubenswrapper[4779]: I0929 09:46:26.572662 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4jkl4"] Sep 29 09:46:26 crc kubenswrapper[4779]: W0929 09:46:26.572753 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbef3a83_4b34_466e_895f_1005a824efc0.slice/crio-8b743d9a64686019e33698c69e31bc2763091f035c447e6786a32acb4b490daa WatchSource:0}: Error finding container 8b743d9a64686019e33698c69e31bc2763091f035c447e6786a32acb4b490daa: Status 404 returned error can't find the container with id 8b743d9a64686019e33698c69e31bc2763091f035c447e6786a32acb4b490daa Sep 29 09:46:26 crc kubenswrapper[4779]: W0929 09:46:26.586507 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6eead9b6_ef70_4326_be2e_07fc0e22c444.slice/crio-6c8fb643a56686d48b5961b798ee6fd2ef72eca1b820390f8acc9eb93818db6c WatchSource:0}: Error finding container 6c8fb643a56686d48b5961b798ee6fd2ef72eca1b820390f8acc9eb93818db6c: Status 404 returned error can't find the container with id 6c8fb643a56686d48b5961b798ee6fd2ef72eca1b820390f8acc9eb93818db6c Sep 29 09:46:26 crc kubenswrapper[4779]: E0929 09:46:26.681969 4779 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.30:58212->38.102.83.30:34385: write tcp 38.102.83.30:58212->38.102.83.30:34385: write: connection reset by peer Sep 29 09:46:26 crc kubenswrapper[4779]: I0929 09:46:26.768391 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-g94ck"] Sep 29 09:46:26 crc kubenswrapper[4779]: W0929 09:46:26.776475 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bfe06c4_af74_448c_b10b_d2f7f62fc96a.slice/crio-f97dee085adadd8accf554e9c26c84c6fb25cec911f9ce089fed1e26c39049bb WatchSource:0}: Error finding container f97dee085adadd8accf554e9c26c84c6fb25cec911f9ce089fed1e26c39049bb: Status 404 returned error can't find the container with id f97dee085adadd8accf554e9c26c84c6fb25cec911f9ce089fed1e26c39049bb Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.221593 4779 generic.go:334] "Generic (PLEG): container finished" podID="2bfe06c4-af74-448c-b10b-d2f7f62fc96a" containerID="85715d7ff260c61df401585a2352a5f5a64c7be90f3ce8200620d79fabccbd58" exitCode=0 Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.221695 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g94ck" event={"ID":"2bfe06c4-af74-448c-b10b-d2f7f62fc96a","Type":"ContainerDied","Data":"85715d7ff260c61df401585a2352a5f5a64c7be90f3ce8200620d79fabccbd58"} Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.221928 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g94ck" 
event={"ID":"2bfe06c4-af74-448c-b10b-d2f7f62fc96a","Type":"ContainerStarted","Data":"f97dee085adadd8accf554e9c26c84c6fb25cec911f9ce089fed1e26c39049bb"} Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.223092 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4jkl4" event={"ID":"6eead9b6-ef70-4326-be2e-07fc0e22c444","Type":"ContainerStarted","Data":"6c8fb643a56686d48b5961b798ee6fd2ef72eca1b820390f8acc9eb93818db6c"} Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.225111 4779 generic.go:334] "Generic (PLEG): container finished" podID="1c3d7dc4-b829-4340-abb5-449b3dcd606f" containerID="1ca1dc776d47cb3b2b721ac48874f0d2dda8e5b90f422ca6950cd1bec6581c97" exitCode=0 Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.225174 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-c9wj9" event={"ID":"1c3d7dc4-b829-4340-abb5-449b3dcd606f","Type":"ContainerDied","Data":"1ca1dc776d47cb3b2b721ac48874f0d2dda8e5b90f422ca6950cd1bec6581c97"} Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.225197 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-c9wj9" event={"ID":"1c3d7dc4-b829-4340-abb5-449b3dcd606f","Type":"ContainerStarted","Data":"26319ecd013b005d9e1db47762ace37f0a26202d24e67d92630b15efa80a2aa8"} Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.226092 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-lg2fm" event={"ID":"fbef3a83-4b34-466e-895f-1005a824efc0","Type":"ContainerStarted","Data":"8b743d9a64686019e33698c69e31bc2763091f035c447e6786a32acb4b490daa"} Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.231435 4779 generic.go:334] "Generic (PLEG): container finished" podID="b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" containerID="6908d8bacc0ddb133fedc2bdef6d4b7d272243d33be84c58815b6cbd7259b19c" exitCode=0 Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.231547 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dnnxk" event={"ID":"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f","Type":"ContainerDied","Data":"6908d8bacc0ddb133fedc2bdef6d4b7d272243d33be84c58815b6cbd7259b19c"} Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.232934 4779 generic.go:334] "Generic (PLEG): container finished" podID="9775652b-71da-489b-9105-d795a7feb2cb" containerID="8453735f3ea9dbaa70d50cbcb969524425033812b9026e34fb1b6283314c9d17" exitCode=0 Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.232972 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g7g8x" event={"ID":"9775652b-71da-489b-9105-d795a7feb2cb","Type":"ContainerDied","Data":"8453735f3ea9dbaa70d50cbcb969524425033812b9026e34fb1b6283314c9d17"} Sep 29 09:46:27 crc kubenswrapper[4779]: I0929 09:46:27.232994 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g7g8x" event={"ID":"9775652b-71da-489b-9105-d795a7feb2cb","Type":"ContainerStarted","Data":"ef9a65475ef6742bf36dd58af93bcbd4bb65405686c3210fc7206b2a57c03bcf"} Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.282104 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g7g8x" event={"ID":"9775652b-71da-489b-9105-d795a7feb2cb","Type":"ContainerDied","Data":"ef9a65475ef6742bf36dd58af93bcbd4bb65405686c3210fc7206b2a57c03bcf"} Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.282369 4779 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="ef9a65475ef6742bf36dd58af93bcbd4bb65405686c3210fc7206b2a57c03bcf" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.282326 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-c9wj9" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.284684 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-c9wj9" event={"ID":"1c3d7dc4-b829-4340-abb5-449b3dcd606f","Type":"ContainerDied","Data":"26319ecd013b005d9e1db47762ace37f0a26202d24e67d92630b15efa80a2aa8"} Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.284714 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26319ecd013b005d9e1db47762ace37f0a26202d24e67d92630b15efa80a2aa8" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.293967 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g7g8x" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.407324 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jvsk\" (UniqueName: \"kubernetes.io/projected/9775652b-71da-489b-9105-d795a7feb2cb-kube-api-access-7jvsk\") pod \"9775652b-71da-489b-9105-d795a7feb2cb\" (UID: \"9775652b-71da-489b-9105-d795a7feb2cb\") " Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.407535 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcpps\" (UniqueName: \"kubernetes.io/projected/1c3d7dc4-b829-4340-abb5-449b3dcd606f-kube-api-access-tcpps\") pod \"1c3d7dc4-b829-4340-abb5-449b3dcd606f\" (UID: \"1c3d7dc4-b829-4340-abb5-449b3dcd606f\") " Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.413751 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9775652b-71da-489b-9105-d795a7feb2cb-kube-api-access-7jvsk" (OuterVolumeSpecName: "kube-api-access-7jvsk") pod "9775652b-71da-489b-9105-d795a7feb2cb" (UID: "9775652b-71da-489b-9105-d795a7feb2cb"). InnerVolumeSpecName "kube-api-access-7jvsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.426937 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c3d7dc4-b829-4340-abb5-449b3dcd606f-kube-api-access-tcpps" (OuterVolumeSpecName: "kube-api-access-tcpps") pod "1c3d7dc4-b829-4340-abb5-449b3dcd606f" (UID: "1c3d7dc4-b829-4340-abb5-449b3dcd606f"). InnerVolumeSpecName "kube-api-access-tcpps". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.508923 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcpps\" (UniqueName: \"kubernetes.io/projected/1c3d7dc4-b829-4340-abb5-449b3dcd606f-kube-api-access-tcpps\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.508951 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jvsk\" (UniqueName: \"kubernetes.io/projected/9775652b-71da-489b-9105-d795a7feb2cb-kube-api-access-7jvsk\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.549083 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 29 09:46:32 crc kubenswrapper[4779]: I0929 09:46:32.556087 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 29 09:46:33 crc kubenswrapper[4779]: I0929 09:46:33.293088 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-c9wj9" Sep 29 09:46:33 crc kubenswrapper[4779]: I0929 09:46:33.293133 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g7g8x" Sep 29 09:46:33 crc kubenswrapper[4779]: I0929 09:46:33.299729 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.549166 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-421c-account-create-hf6f2"] Sep 29 09:46:35 crc kubenswrapper[4779]: E0929 09:46:35.549538 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9775652b-71da-489b-9105-d795a7feb2cb" containerName="mariadb-database-create" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.549553 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9775652b-71da-489b-9105-d795a7feb2cb" containerName="mariadb-database-create" Sep 29 09:46:35 crc kubenswrapper[4779]: E0929 09:46:35.549573 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c3d7dc4-b829-4340-abb5-449b3dcd606f" containerName="mariadb-database-create" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.549581 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c3d7dc4-b829-4340-abb5-449b3dcd606f" containerName="mariadb-database-create" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.549804 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c3d7dc4-b829-4340-abb5-449b3dcd606f" containerName="mariadb-database-create" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.549834 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="9775652b-71da-489b-9105-d795a7feb2cb" containerName="mariadb-database-create" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.550472 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-421c-account-create-hf6f2" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.552557 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.560879 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-421c-account-create-hf6f2"] Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.582717 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dnnxk" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.590436 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-g94ck" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.671179 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vkf2\" (UniqueName: \"kubernetes.io/projected/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-kube-api-access-8vkf2\") pod \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.671258 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-combined-ca-bundle\") pod \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.671343 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-db-sync-config-data\") pod \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.671397 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr4n6\" (UniqueName: \"kubernetes.io/projected/2bfe06c4-af74-448c-b10b-d2f7f62fc96a-kube-api-access-sr4n6\") pod \"2bfe06c4-af74-448c-b10b-d2f7f62fc96a\" (UID: \"2bfe06c4-af74-448c-b10b-d2f7f62fc96a\") " Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.671456 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-config-data\") pod \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\" (UID: \"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f\") " Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.671775 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5mvm\" (UniqueName: \"kubernetes.io/projected/8687a876-1897-472b-9a43-dc6ab0fb034a-kube-api-access-x5mvm\") pod \"cinder-421c-account-create-hf6f2\" (UID: \"8687a876-1897-472b-9a43-dc6ab0fb034a\") " pod="openstack/cinder-421c-account-create-hf6f2" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.677985 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bfe06c4-af74-448c-b10b-d2f7f62fc96a-kube-api-access-sr4n6" (OuterVolumeSpecName: "kube-api-access-sr4n6") pod "2bfe06c4-af74-448c-b10b-d2f7f62fc96a" (UID: "2bfe06c4-af74-448c-b10b-d2f7f62fc96a"). InnerVolumeSpecName "kube-api-access-sr4n6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.678365 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-kube-api-access-8vkf2" (OuterVolumeSpecName: "kube-api-access-8vkf2") pod "b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" (UID: "b16b7fa7-e1c5-4368-9f6b-cc5a7526671f"). InnerVolumeSpecName "kube-api-access-8vkf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.682833 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" (UID: "b16b7fa7-e1c5-4368-9f6b-cc5a7526671f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.700504 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" (UID: "b16b7fa7-e1c5-4368-9f6b-cc5a7526671f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.720527 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-config-data" (OuterVolumeSpecName: "config-data") pod "b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" (UID: "b16b7fa7-e1c5-4368-9f6b-cc5a7526671f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.773733 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5mvm\" (UniqueName: \"kubernetes.io/projected/8687a876-1897-472b-9a43-dc6ab0fb034a-kube-api-access-x5mvm\") pod \"cinder-421c-account-create-hf6f2\" (UID: \"8687a876-1897-472b-9a43-dc6ab0fb034a\") " pod="openstack/cinder-421c-account-create-hf6f2" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.774539 4779 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.775079 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr4n6\" (UniqueName: \"kubernetes.io/projected/2bfe06c4-af74-448c-b10b-d2f7f62fc96a-kube-api-access-sr4n6\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.775122 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.775141 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vkf2\" (UniqueName: \"kubernetes.io/projected/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-kube-api-access-8vkf2\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.775153 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.791296 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5mvm\" (UniqueName: \"kubernetes.io/projected/8687a876-1897-472b-9a43-dc6ab0fb034a-kube-api-access-x5mvm\") pod \"cinder-421c-account-create-hf6f2\" (UID: \"8687a876-1897-472b-9a43-dc6ab0fb034a\") " pod="openstack/cinder-421c-account-create-hf6f2" Sep 29 09:46:35 crc kubenswrapper[4779]: I0929 09:46:35.905105 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-421c-account-create-hf6f2" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.319611 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dnnxk" event={"ID":"b16b7fa7-e1c5-4368-9f6b-cc5a7526671f","Type":"ContainerDied","Data":"493338a2ae3c6b5b2f64513c6b9edfeaf4841183ef551f7399e9f98573c92ead"} Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.319931 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="493338a2ae3c6b5b2f64513c6b9edfeaf4841183ef551f7399e9f98573c92ead" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.319631 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dnnxk" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.321239 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-g94ck" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.321250 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g94ck" event={"ID":"2bfe06c4-af74-448c-b10b-d2f7f62fc96a","Type":"ContainerDied","Data":"f97dee085adadd8accf554e9c26c84c6fb25cec911f9ce089fed1e26c39049bb"} Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.321283 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f97dee085adadd8accf554e9c26c84c6fb25cec911f9ce089fed1e26c39049bb" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.323115 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4jkl4" event={"ID":"6eead9b6-ef70-4326-be2e-07fc0e22c444","Type":"ContainerStarted","Data":"bd27468f74c9b9abf433abbfac322209d678e5c83e82d3f76d8091d675f36e13"} Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.325135 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-lg2fm" event={"ID":"fbef3a83-4b34-466e-895f-1005a824efc0","Type":"ContainerStarted","Data":"c3cbd235fb5bfce2b581612ad9c8e12fb5b750d27620881f19a59f849c745198"} Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.370151 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4jkl4" podStartSLOduration=1.9765624179999999 podStartE2EDuration="11.370108515s" podCreationTimestamp="2025-09-29 09:46:25 +0000 UTC" firstStartedPulling="2025-09-29 09:46:26.590186782 +0000 UTC m=+1018.571510686" lastFinishedPulling="2025-09-29 09:46:35.983732879 +0000 UTC m=+1027.965056783" observedRunningTime="2025-09-29 09:46:36.34429257 +0000 UTC m=+1028.325616474" watchObservedRunningTime="2025-09-29 09:46:36.370108515 +0000 UTC m=+1028.351432459" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.374001 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-lg2fm" podStartSLOduration=1.933717871 podStartE2EDuration="11.373985157s" podCreationTimestamp="2025-09-29 09:46:25 +0000 UTC" firstStartedPulling="2025-09-29 09:46:26.575171908 +0000 UTC m=+1018.556495802" lastFinishedPulling="2025-09-29 09:46:36.015439184 +0000 UTC m=+1027.996763088" observedRunningTime="2025-09-29 09:46:36.367071648 +0000 UTC m=+1028.348395552" watchObservedRunningTime="2025-09-29 09:46:36.373985157 +0000 UTC m=+1028.355309121" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.419944 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-421c-account-create-hf6f2"] Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.981038 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77ddf8cf97-trh94"] Sep 29 09:46:36 crc kubenswrapper[4779]: E0929 09:46:36.982272 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bfe06c4-af74-448c-b10b-d2f7f62fc96a" containerName="mariadb-database-create" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.982292 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bfe06c4-af74-448c-b10b-d2f7f62fc96a" containerName="mariadb-database-create" Sep 29 09:46:36 crc kubenswrapper[4779]: E0929 09:46:36.982681 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" containerName="glance-db-sync" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.982695 4779 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" containerName="glance-db-sync" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.982968 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" containerName="glance-db-sync" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.983002 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bfe06c4-af74-448c-b10b-d2f7f62fc96a" containerName="mariadb-database-create" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.984561 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:36 crc kubenswrapper[4779]: I0929 09:46:36.990850 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77ddf8cf97-trh94"] Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.098578 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-sb\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.098878 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flpvf\" (UniqueName: \"kubernetes.io/projected/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-kube-api-access-flpvf\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.098963 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-dns-svc\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.099162 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-config\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.099226 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-nb\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.201308 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flpvf\" (UniqueName: \"kubernetes.io/projected/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-kube-api-access-flpvf\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.201358 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-dns-svc\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: 
\"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.201437 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-config\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.201475 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-nb\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.201500 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-sb\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.202888 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-sb\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.202930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-dns-svc\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.202885 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-config\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.202885 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-nb\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.219032 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flpvf\" (UniqueName: \"kubernetes.io/projected/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-kube-api-access-flpvf\") pod \"dnsmasq-dns-77ddf8cf97-trh94\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.304116 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.339370 4779 generic.go:334] "Generic (PLEG): container finished" podID="8687a876-1897-472b-9a43-dc6ab0fb034a" containerID="c37f17b2b19d62939c704152ebe4158ee7cf161fd0936e7e01c819d5b8d80d32" exitCode=0 Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.339735 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-421c-account-create-hf6f2" event={"ID":"8687a876-1897-472b-9a43-dc6ab0fb034a","Type":"ContainerDied","Data":"c37f17b2b19d62939c704152ebe4158ee7cf161fd0936e7e01c819d5b8d80d32"} Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.339798 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-421c-account-create-hf6f2" event={"ID":"8687a876-1897-472b-9a43-dc6ab0fb034a","Type":"ContainerStarted","Data":"8639d47bf9399d9dbd8b1fdfa532c924f9e0ad7cddd3c8da00fd57ede5546b85"} Sep 29 09:46:37 crc kubenswrapper[4779]: I0929 09:46:37.786436 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77ddf8cf97-trh94"] Sep 29 09:46:38 crc kubenswrapper[4779]: I0929 09:46:38.349595 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" event={"ID":"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3","Type":"ContainerStarted","Data":"d507cbe2f967936ea6be5e1e5c09dd5023affde185b46a7c72a5e54c15671af0"} Sep 29 09:46:38 crc kubenswrapper[4779]: I0929 09:46:38.748277 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-421c-account-create-hf6f2" Sep 29 09:46:38 crc kubenswrapper[4779]: I0929 09:46:38.859713 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5mvm\" (UniqueName: \"kubernetes.io/projected/8687a876-1897-472b-9a43-dc6ab0fb034a-kube-api-access-x5mvm\") pod \"8687a876-1897-472b-9a43-dc6ab0fb034a\" (UID: \"8687a876-1897-472b-9a43-dc6ab0fb034a\") " Sep 29 09:46:38 crc kubenswrapper[4779]: I0929 09:46:38.865227 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8687a876-1897-472b-9a43-dc6ab0fb034a-kube-api-access-x5mvm" (OuterVolumeSpecName: "kube-api-access-x5mvm") pod "8687a876-1897-472b-9a43-dc6ab0fb034a" (UID: "8687a876-1897-472b-9a43-dc6ab0fb034a"). InnerVolumeSpecName "kube-api-access-x5mvm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:38 crc kubenswrapper[4779]: I0929 09:46:38.961574 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5mvm\" (UniqueName: \"kubernetes.io/projected/8687a876-1897-472b-9a43-dc6ab0fb034a-kube-api-access-x5mvm\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:39 crc kubenswrapper[4779]: I0929 09:46:39.367894 4779 generic.go:334] "Generic (PLEG): container finished" podID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" containerID="bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563" exitCode=0 Sep 29 09:46:39 crc kubenswrapper[4779]: I0929 09:46:39.368036 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" event={"ID":"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3","Type":"ContainerDied","Data":"bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563"} Sep 29 09:46:39 crc kubenswrapper[4779]: I0929 09:46:39.371090 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-421c-account-create-hf6f2" event={"ID":"8687a876-1897-472b-9a43-dc6ab0fb034a","Type":"ContainerDied","Data":"8639d47bf9399d9dbd8b1fdfa532c924f9e0ad7cddd3c8da00fd57ede5546b85"} Sep 29 09:46:39 crc kubenswrapper[4779]: I0929 09:46:39.371128 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8639d47bf9399d9dbd8b1fdfa532c924f9e0ad7cddd3c8da00fd57ede5546b85" Sep 29 09:46:39 crc kubenswrapper[4779]: I0929 09:46:39.371183 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-421c-account-create-hf6f2" Sep 29 09:46:40 crc kubenswrapper[4779]: I0929 09:46:40.385192 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" event={"ID":"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3","Type":"ContainerStarted","Data":"853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806"} Sep 29 09:46:40 crc kubenswrapper[4779]: I0929 09:46:40.386126 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:40 crc kubenswrapper[4779]: I0929 09:46:40.387731 4779 generic.go:334] "Generic (PLEG): container finished" podID="fbef3a83-4b34-466e-895f-1005a824efc0" containerID="c3cbd235fb5bfce2b581612ad9c8e12fb5b750d27620881f19a59f849c745198" exitCode=0 Sep 29 09:46:40 crc kubenswrapper[4779]: I0929 09:46:40.387760 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-lg2fm" event={"ID":"fbef3a83-4b34-466e-895f-1005a824efc0","Type":"ContainerDied","Data":"c3cbd235fb5bfce2b581612ad9c8e12fb5b750d27620881f19a59f849c745198"} Sep 29 09:46:40 crc kubenswrapper[4779]: I0929 09:46:40.408662 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" podStartSLOduration=4.408641996 podStartE2EDuration="4.408641996s" podCreationTimestamp="2025-09-29 09:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:46:40.405102064 +0000 UTC m=+1032.386425968" watchObservedRunningTime="2025-09-29 09:46:40.408641996 +0000 UTC m=+1032.389965900" Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.399998 4779 generic.go:334] "Generic (PLEG): container finished" podID="6eead9b6-ef70-4326-be2e-07fc0e22c444" containerID="bd27468f74c9b9abf433abbfac322209d678e5c83e82d3f76d8091d675f36e13" exitCode=0 Sep 29 09:46:41 crc kubenswrapper[4779]: 
I0929 09:46:41.400094 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4jkl4" event={"ID":"6eead9b6-ef70-4326-be2e-07fc0e22c444","Type":"ContainerDied","Data":"bd27468f74c9b9abf433abbfac322209d678e5c83e82d3f76d8091d675f36e13"} Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.784325 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.920680 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlvqz\" (UniqueName: \"kubernetes.io/projected/fbef3a83-4b34-466e-895f-1005a824efc0-kube-api-access-vlvqz\") pod \"fbef3a83-4b34-466e-895f-1005a824efc0\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.920745 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-combined-ca-bundle\") pod \"fbef3a83-4b34-466e-895f-1005a824efc0\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.920826 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-config-data\") pod \"fbef3a83-4b34-466e-895f-1005a824efc0\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.920852 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-db-sync-config-data\") pod \"fbef3a83-4b34-466e-895f-1005a824efc0\" (UID: \"fbef3a83-4b34-466e-895f-1005a824efc0\") " Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.928941 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "fbef3a83-4b34-466e-895f-1005a824efc0" (UID: "fbef3a83-4b34-466e-895f-1005a824efc0"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.928996 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbef3a83-4b34-466e-895f-1005a824efc0-kube-api-access-vlvqz" (OuterVolumeSpecName: "kube-api-access-vlvqz") pod "fbef3a83-4b34-466e-895f-1005a824efc0" (UID: "fbef3a83-4b34-466e-895f-1005a824efc0"). InnerVolumeSpecName "kube-api-access-vlvqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.957386 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbef3a83-4b34-466e-895f-1005a824efc0" (UID: "fbef3a83-4b34-466e-895f-1005a824efc0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:41 crc kubenswrapper[4779]: I0929 09:46:41.965741 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-config-data" (OuterVolumeSpecName: "config-data") pod "fbef3a83-4b34-466e-895f-1005a824efc0" (UID: "fbef3a83-4b34-466e-895f-1005a824efc0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.022775 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlvqz\" (UniqueName: \"kubernetes.io/projected/fbef3a83-4b34-466e-895f-1005a824efc0-kube-api-access-vlvqz\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.022820 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.022837 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.022853 4779 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fbef3a83-4b34-466e-895f-1005a824efc0-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.420444 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-lg2fm" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.420473 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-lg2fm" event={"ID":"fbef3a83-4b34-466e-895f-1005a824efc0","Type":"ContainerDied","Data":"8b743d9a64686019e33698c69e31bc2763091f035c447e6786a32acb4b490daa"} Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.421285 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b743d9a64686019e33698c69e31bc2763091f035c447e6786a32acb4b490daa" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.781275 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.938375 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-config-data\") pod \"6eead9b6-ef70-4326-be2e-07fc0e22c444\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.938415 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx2xb\" (UniqueName: \"kubernetes.io/projected/6eead9b6-ef70-4326-be2e-07fc0e22c444-kube-api-access-mx2xb\") pod \"6eead9b6-ef70-4326-be2e-07fc0e22c444\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.938496 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-combined-ca-bundle\") pod \"6eead9b6-ef70-4326-be2e-07fc0e22c444\" (UID: \"6eead9b6-ef70-4326-be2e-07fc0e22c444\") " Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.943361 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eead9b6-ef70-4326-be2e-07fc0e22c444-kube-api-access-mx2xb" (OuterVolumeSpecName: "kube-api-access-mx2xb") pod "6eead9b6-ef70-4326-be2e-07fc0e22c444" (UID: "6eead9b6-ef70-4326-be2e-07fc0e22c444"). InnerVolumeSpecName "kube-api-access-mx2xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.973227 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6eead9b6-ef70-4326-be2e-07fc0e22c444" (UID: "6eead9b6-ef70-4326-be2e-07fc0e22c444"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:42 crc kubenswrapper[4779]: I0929 09:46:42.979672 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-config-data" (OuterVolumeSpecName: "config-data") pod "6eead9b6-ef70-4326-be2e-07fc0e22c444" (UID: "6eead9b6-ef70-4326-be2e-07fc0e22c444"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.039831 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.039865 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx2xb\" (UniqueName: \"kubernetes.io/projected/6eead9b6-ef70-4326-be2e-07fc0e22c444-kube-api-access-mx2xb\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.039875 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eead9b6-ef70-4326-be2e-07fc0e22c444-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.428414 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4jkl4" event={"ID":"6eead9b6-ef70-4326-be2e-07fc0e22c444","Type":"ContainerDied","Data":"6c8fb643a56686d48b5961b798ee6fd2ef72eca1b820390f8acc9eb93818db6c"} Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.428743 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c8fb643a56686d48b5961b798ee6fd2ef72eca1b820390f8acc9eb93818db6c" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.428529 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4jkl4" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.660538 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7nqcd"] Sep 29 09:46:43 crc kubenswrapper[4779]: E0929 09:46:43.660887 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbef3a83-4b34-466e-895f-1005a824efc0" containerName="watcher-db-sync" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.660919 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbef3a83-4b34-466e-895f-1005a824efc0" containerName="watcher-db-sync" Sep 29 09:46:43 crc kubenswrapper[4779]: E0929 09:46:43.660933 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eead9b6-ef70-4326-be2e-07fc0e22c444" containerName="keystone-db-sync" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.660939 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eead9b6-ef70-4326-be2e-07fc0e22c444" containerName="keystone-db-sync" Sep 29 09:46:43 crc kubenswrapper[4779]: E0929 09:46:43.660951 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8687a876-1897-472b-9a43-dc6ab0fb034a" containerName="mariadb-account-create" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.660957 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8687a876-1897-472b-9a43-dc6ab0fb034a" containerName="mariadb-account-create" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.661126 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8687a876-1897-472b-9a43-dc6ab0fb034a" containerName="mariadb-account-create" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.661136 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbef3a83-4b34-466e-895f-1005a824efc0" containerName="watcher-db-sync" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.661147 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eead9b6-ef70-4326-be2e-07fc0e22c444" 
containerName="keystone-db-sync" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.661713 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.671224 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.671453 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n7qtr" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.671583 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.671766 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.687951 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7nqcd"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.728124 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77ddf8cf97-trh94"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.728332 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" podUID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" containerName="dnsmasq-dns" containerID="cri-o://853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806" gracePeriod=10 Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.768272 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-credential-keys\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.768316 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64hmq\" (UniqueName: \"kubernetes.io/projected/09209678-fb3b-46a8-b3a4-7c526eb785d0-kube-api-access-64hmq\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.768354 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-scripts\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.768396 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-config-data\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.768422 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-fernet-keys\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc 
kubenswrapper[4779]: I0929 09:46:43.768439 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-combined-ca-bundle\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.790065 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.791430 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.798842 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-264xg" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.799060 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.803118 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.804505 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.814118 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.815779 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69d5d86869-mjnsr"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.817471 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.843238 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.844381 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.861200 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.861362 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69d5d86869-mjnsr"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.864965 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.871837 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-credential-keys\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.871883 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64hmq\" (UniqueName: \"kubernetes.io/projected/09209678-fb3b-46a8-b3a4-7c526eb785d0-kube-api-access-64hmq\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.871960 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-scripts\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.872018 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-config-data\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.872044 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-fernet-keys\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.872061 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-combined-ca-bundle\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.878867 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.879466 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-credential-keys\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.884636 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-config-data\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.887633 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-fernet-keys\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.891661 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-combined-ca-bundle\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.895287 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.918298 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-scripts\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.923178 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64hmq\" (UniqueName: \"kubernetes.io/projected/09209678-fb3b-46a8-b3a4-7c526eb785d0-kube-api-access-64hmq\") pod \"keystone-bootstrap-7nqcd\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.972948 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d5nj\" (UniqueName: \"kubernetes.io/projected/980dd73e-ec79-4751-ad16-7e95c4212336-kube-api-access-9d5nj\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973012 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-config\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973047 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-config-data\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973065 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973090 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-pmbnh\" (UniqueName: \"kubernetes.io/projected/524f6892-467d-4f42-b81a-5b4dbae841fd-kube-api-access-pmbnh\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973107 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973141 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2vvk\" (UniqueName: \"kubernetes.io/projected/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-kube-api-access-w2vvk\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973160 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-logs\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973174 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67e636b-969b-48ee-bbec-3d8b38b22274-logs\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973188 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973206 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-nb\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973228 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-sb\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973243 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-config-data\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973258 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/980dd73e-ec79-4751-ad16-7e95c4212336-logs\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973271 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973283 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973298 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-dns-svc\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973314 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbdmx\" (UniqueName: \"kubernetes.io/projected/f67e636b-969b-48ee-bbec-3d8b38b22274-kube-api-access-jbdmx\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.973362 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.977247 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-7rjgf"] Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.978272 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.983271 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.983575 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.989566 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-fts6t" Sep 29 09:46:43 crc kubenswrapper[4779]: I0929 09:46:43.992008 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.042304 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7rjgf"] Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075025 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075077 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d5nj\" (UniqueName: \"kubernetes.io/projected/980dd73e-ec79-4751-ad16-7e95c4212336-kube-api-access-9d5nj\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075119 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-config-data\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075140 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-config\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075159 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-combined-ca-bundle\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075184 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-config-data\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075203 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075227 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmbnh\" (UniqueName: \"kubernetes.io/projected/524f6892-467d-4f42-b81a-5b4dbae841fd-kube-api-access-pmbnh\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075243 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-custom-prometheus-ca\") pod 
\"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075258 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr9vh\" (UniqueName: \"kubernetes.io/projected/7ce07226-a77d-46d9-a099-04375136d8fc-kube-api-access-cr9vh\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075296 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2vvk\" (UniqueName: \"kubernetes.io/projected/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-kube-api-access-w2vvk\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075353 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67e636b-969b-48ee-bbec-3d8b38b22274-logs\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075367 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-logs\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075383 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075405 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-nb\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075426 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-sb\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075446 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-config-data\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075461 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075476 
4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075490 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/980dd73e-ec79-4751-ad16-7e95c4212336-logs\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075508 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-dns-svc\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075524 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbdmx\" (UniqueName: \"kubernetes.io/projected/f67e636b-969b-48ee-bbec-3d8b38b22274-kube-api-access-jbdmx\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075542 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-db-sync-config-data\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075559 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7ce07226-a77d-46d9-a099-04375136d8fc-etc-machine-id\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.075574 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-scripts\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.077363 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-logs\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.078405 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-config\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.086230 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.091513 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.087400 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-dns-svc\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.087897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-sb\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.095285 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.097341 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67e636b-969b-48ee-bbec-3d8b38b22274-logs\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.097565 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-nb\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.101536 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.086803 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/980dd73e-ec79-4751-ad16-7e95c4212336-logs\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.103524 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.104306 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.105159 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.108832 4779 reflector.go:368] Caches 
populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.109302 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.110918 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-config-data\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.113375 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.113824 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.114005 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-fh8wz"] Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.114855 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbdmx\" (UniqueName: \"kubernetes.io/projected/f67e636b-969b-48ee-bbec-3d8b38b22274-kube-api-access-jbdmx\") pod \"watcher-decision-engine-0\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") " pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.116424 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.120578 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9kwx6" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.120943 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.120962 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmbnh\" (UniqueName: \"kubernetes.io/projected/524f6892-467d-4f42-b81a-5b4dbae841fd-kube-api-access-pmbnh\") pod \"dnsmasq-dns-69d5d86869-mjnsr\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.121749 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.134940 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2vvk\" (UniqueName: \"kubernetes.io/projected/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-kube-api-access-w2vvk\") pod \"watcher-applier-0\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.138926 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d5nj\" (UniqueName: \"kubernetes.io/projected/980dd73e-ec79-4751-ad16-7e95c4212336-kube-api-access-9d5nj\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.159342 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-config-data\") pod \"watcher-api-0\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.180850 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr9vh\" (UniqueName: \"kubernetes.io/projected/7ce07226-a77d-46d9-a099-04375136d8fc-kube-api-access-cr9vh\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.180925 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.180975 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-scripts\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181029 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-log-httpd\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 
09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181076 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-db-sync-config-data\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181097 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7ce07226-a77d-46d9-a099-04375136d8fc-etc-machine-id\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181117 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-scripts\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181145 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-run-httpd\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181177 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-config-data\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181211 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181244 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-config-data\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181280 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-combined-ca-bundle\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.181307 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4l9v\" (UniqueName: \"kubernetes.io/projected/785e1b61-3f05-4ac1-ade7-57c25fe6f177-kube-api-access-f4l9v\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.183196 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/7ce07226-a77d-46d9-a099-04375136d8fc-etc-machine-id\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.187473 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-db-sync-config-data\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.192447 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-combined-ca-bundle\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.199237 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr9vh\" (UniqueName: \"kubernetes.io/projected/7ce07226-a77d-46d9-a099-04375136d8fc-kube-api-access-cr9vh\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.203066 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-config-data\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.207942 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-scripts\") pod \"cinder-db-sync-7rjgf\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.209860 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69d5d86869-mjnsr"] Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.212235 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.248893 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fh8wz"] Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.259570 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ffc8d9cff-x6d6w"] Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.261098 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.281169 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffc8d9cff-x6d6w"] Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282488 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-config-data\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282523 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khr8g\" (UniqueName: \"kubernetes.io/projected/d52d2b76-d868-4f6d-ab27-20d1d8223952-kube-api-access-khr8g\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282542 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d52d2b76-d868-4f6d-ab27-20d1d8223952-logs\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282574 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282595 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-scripts\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282633 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4l9v\" (UniqueName: \"kubernetes.io/projected/785e1b61-3f05-4ac1-ade7-57c25fe6f177-kube-api-access-f4l9v\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282674 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282717 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-scripts\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282737 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-config-data\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " 
pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282760 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-log-httpd\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282781 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-combined-ca-bundle\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.282808 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-run-httpd\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.283238 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-run-httpd\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.284310 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-log-httpd\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.287041 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.287269 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.288235 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-scripts\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.288954 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-config-data\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.290840 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.299444 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.306805 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4l9v\" (UniqueName: \"kubernetes.io/projected/785e1b61-3f05-4ac1-ade7-57c25fe6f177-kube-api-access-f4l9v\") pod \"ceilometer-0\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.332392 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.349847 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.384871 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.384924 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.384945 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-dns-svc\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.384967 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg7dc\" (UniqueName: \"kubernetes.io/projected/5406545d-6af2-453b-bad1-c5d0d58521ff-kube-api-access-fg7dc\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.385004 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-config-data\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.385036 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-combined-ca-bundle\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.385080 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khr8g\" (UniqueName: \"kubernetes.io/projected/d52d2b76-d868-4f6d-ab27-20d1d8223952-kube-api-access-khr8g\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 
09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.385096 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d52d2b76-d868-4f6d-ab27-20d1d8223952-logs\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.385137 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-scripts\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.385157 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-config\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.387022 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d52d2b76-d868-4f6d-ab27-20d1d8223952-logs\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.390807 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-scripts\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.390972 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-config-data\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.392868 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-combined-ca-bundle\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.402857 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khr8g\" (UniqueName: \"kubernetes.io/projected/d52d2b76-d868-4f6d-ab27-20d1d8223952-kube-api-access-khr8g\") pod \"placement-db-sync-fh8wz\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.434258 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.449037 4779 generic.go:334] "Generic (PLEG): container finished" podID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" containerID="853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806" exitCode=0 Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.449083 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" event={"ID":"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3","Type":"ContainerDied","Data":"853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806"} Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.449111 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" event={"ID":"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3","Type":"ContainerDied","Data":"d507cbe2f967936ea6be5e1e5c09dd5023affde185b46a7c72a5e54c15671af0"} Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.449130 4779 scope.go:117] "RemoveContainer" containerID="853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.449264 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77ddf8cf97-trh94" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.484339 4779 scope.go:117] "RemoveContainer" containerID="bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.486982 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-config\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.487068 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.487092 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.487111 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-dns-svc\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.487158 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg7dc\" (UniqueName: \"kubernetes.io/projected/5406545d-6af2-453b-bad1-c5d0d58521ff-kube-api-access-fg7dc\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.487939 4779 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.488623 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-config\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.489285 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.489564 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-dns-svc\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.513050 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg7dc\" (UniqueName: \"kubernetes.io/projected/5406545d-6af2-453b-bad1-c5d0d58521ff-kube-api-access-fg7dc\") pod \"dnsmasq-dns-6ffc8d9cff-x6d6w\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.516785 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.519521 4779 scope.go:117] "RemoveContainer" containerID="853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806" Sep 29 09:46:44 crc kubenswrapper[4779]: E0929 09:46:44.519826 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806\": container with ID starting with 853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806 not found: ID does not exist" containerID="853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.519849 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806"} err="failed to get container status \"853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806\": rpc error: code = NotFound desc = could not find container \"853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806\": container with ID starting with 853ef1b1de7f54a794f2c6e75aa586ea99522467c55aa66554e714b1a82e6806 not found: ID does not exist" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.519867 4779 scope.go:117] "RemoveContainer" containerID="bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563" Sep 29 09:46:44 crc kubenswrapper[4779]: E0929 09:46:44.520055 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563\": container with ID starting with bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563 not found: ID does not exist" containerID="bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.520071 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563"} err="failed to get container status \"bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563\": rpc error: code = NotFound desc = could not find container \"bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563\": container with ID starting with bef107173159ad405307da7fca2d3fef988314b0b9a4785e081478be765b5563 not found: ID does not exist" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.543146 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fh8wz" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.582134 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.595746 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-sb\") pod \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.595913 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flpvf\" (UniqueName: \"kubernetes.io/projected/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-kube-api-access-flpvf\") pod \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.596360 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-dns-svc\") pod \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.596430 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-nb\") pod \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.596445 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-config\") pod \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\" (UID: \"199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3\") " Sep 29 09:46:44 crc kubenswrapper[4779]: I0929 09:46:44.617533 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-kube-api-access-flpvf" (OuterVolumeSpecName: "kube-api-access-flpvf") pod "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" (UID: "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3"). InnerVolumeSpecName "kube-api-access-flpvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.670343 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" (UID: "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.691472 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" (UID: "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.698506 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.698532 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.698545 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flpvf\" (UniqueName: \"kubernetes.io/projected/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-kube-api-access-flpvf\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.704392 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-config" (OuterVolumeSpecName: "config") pod "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" (UID: "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.708884 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" (UID: "199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.710056 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69d5d86869-mjnsr"] Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.728433 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7nqcd"] Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.794358 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77ddf8cf97-trh94"] Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.802257 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.802286 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:44.813863 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77ddf8cf97-trh94"] Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.457746 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7nqcd" event={"ID":"09209678-fb3b-46a8-b3a4-7c526eb785d0","Type":"ContainerStarted","Data":"3386d853ea16d45ed25d2463b56a6269729e3608892fc940164ef824e377547b"} Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.457993 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7nqcd" event={"ID":"09209678-fb3b-46a8-b3a4-7c526eb785d0","Type":"ContainerStarted","Data":"136fdde37476b5992d690a90afda9fd94452f72d63168ab59a6e1cc6e2b5649b"} Sep 29 09:46:45 crc kubenswrapper[4779]: 
I0929 09:46:45.462227 4779 generic.go:334] "Generic (PLEG): container finished" podID="524f6892-467d-4f42-b81a-5b4dbae841fd" containerID="07c9098f3f6776a18f50dd52039b63faa87c142be08f10e4c4a1975e57337379" exitCode=0 Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.462393 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" event={"ID":"524f6892-467d-4f42-b81a-5b4dbae841fd","Type":"ContainerDied","Data":"07c9098f3f6776a18f50dd52039b63faa87c142be08f10e4c4a1975e57337379"} Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.462450 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" event={"ID":"524f6892-467d-4f42-b81a-5b4dbae841fd","Type":"ContainerStarted","Data":"52e8d4a9e9273f27469b82ae3a48125d8094d3129b7a1006e87407b4fbffeb25"} Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.480814 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7nqcd" podStartSLOduration=2.480791794 podStartE2EDuration="2.480791794s" podCreationTimestamp="2025-09-29 09:46:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:46:45.479341452 +0000 UTC m=+1037.460665356" watchObservedRunningTime="2025-09-29 09:46:45.480791794 +0000 UTC m=+1037.462115708" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.524369 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-e005-account-create-fcctv"] Sep 29 09:46:45 crc kubenswrapper[4779]: E0929 09:46:45.525080 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" containerName="dnsmasq-dns" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.525095 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" containerName="dnsmasq-dns" Sep 29 09:46:45 crc kubenswrapper[4779]: E0929 09:46:45.525110 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" containerName="init" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.525128 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" containerName="init" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.525286 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" containerName="dnsmasq-dns" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.525864 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e005-account-create-fcctv" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.548330 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e005-account-create-fcctv"] Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.552663 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.618030 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvmqc\" (UniqueName: \"kubernetes.io/projected/e274f651-2029-48e9-9142-bb141ebdf551-kube-api-access-jvmqc\") pod \"barbican-e005-account-create-fcctv\" (UID: \"e274f651-2029-48e9-9142-bb141ebdf551\") " pod="openstack/barbican-e005-account-create-fcctv" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.716166 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-df0d-account-create-zpd2h"] Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.718796 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-df0d-account-create-zpd2h" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.720629 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvmqc\" (UniqueName: \"kubernetes.io/projected/e274f651-2029-48e9-9142-bb141ebdf551-kube-api-access-jvmqc\") pod \"barbican-e005-account-create-fcctv\" (UID: \"e274f651-2029-48e9-9142-bb141ebdf551\") " pod="openstack/barbican-e005-account-create-fcctv" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.726091 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.738164 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-df0d-account-create-zpd2h"] Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.783834 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvmqc\" (UniqueName: \"kubernetes.io/projected/e274f651-2029-48e9-9142-bb141ebdf551-kube-api-access-jvmqc\") pod \"barbican-e005-account-create-fcctv\" (UID: \"e274f651-2029-48e9-9142-bb141ebdf551\") " pod="openstack/barbican-e005-account-create-fcctv" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.822426 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6gt5\" (UniqueName: \"kubernetes.io/projected/2f74fcce-3141-4e78-a238-d10f3ff03a11-kube-api-access-g6gt5\") pod \"neutron-df0d-account-create-zpd2h\" (UID: \"2f74fcce-3141-4e78-a238-d10f3ff03a11\") " pod="openstack/neutron-df0d-account-create-zpd2h" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.843582 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e005-account-create-fcctv" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.927068 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6gt5\" (UniqueName: \"kubernetes.io/projected/2f74fcce-3141-4e78-a238-d10f3ff03a11-kube-api-access-g6gt5\") pod \"neutron-df0d-account-create-zpd2h\" (UID: \"2f74fcce-3141-4e78-a238-d10f3ff03a11\") " pod="openstack/neutron-df0d-account-create-zpd2h" Sep 29 09:46:45 crc kubenswrapper[4779]: I0929 09:46:45.950297 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6gt5\" (UniqueName: \"kubernetes.io/projected/2f74fcce-3141-4e78-a238-d10f3ff03a11-kube-api-access-g6gt5\") pod \"neutron-df0d-account-create-zpd2h\" (UID: \"2f74fcce-3141-4e78-a238-d10f3ff03a11\") " pod="openstack/neutron-df0d-account-create-zpd2h" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.048482 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-df0d-account-create-zpd2h" Sep 29 09:46:46 crc kubenswrapper[4779]: W0929 09:46:46.282148 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf67e636b_969b_48ee_bbec_3d8b38b22274.slice/crio-57d94e80b743a707414e4e07a75c1ca84c2998e24c168ff843f3ec6827c4fe90 WatchSource:0}: Error finding container 57d94e80b743a707414e4e07a75c1ca84c2998e24c168ff843f3ec6827c4fe90: Status 404 returned error can't find the container with id 57d94e80b743a707414e4e07a75c1ca84c2998e24c168ff843f3ec6827c4fe90 Sep 29 09:46:46 crc kubenswrapper[4779]: W0929 09:46:46.290974 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod980dd73e_ec79_4751_ad16_7e95c4212336.slice/crio-2c28c1666012045329a44733f36d6e6666a27ff3009ce63a9602ee2aa9d88264 WatchSource:0}: Error finding container 2c28c1666012045329a44733f36d6e6666a27ff3009ce63a9602ee2aa9d88264: Status 404 returned error can't find the container with id 2c28c1666012045329a44733f36d6e6666a27ff3009ce63a9602ee2aa9d88264 Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.292494 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.304570 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.369572 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.385428 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7rjgf"] Sep 29 09:46:46 crc kubenswrapper[4779]: W0929 09:46:46.415664 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ce07226_a77d_46d9_a099_04375136d8fc.slice/crio-e719e61e3059397f328eaa3f003565b92388b98011a62e8a06499163ad4a26ce WatchSource:0}: Error finding container e719e61e3059397f328eaa3f003565b92388b98011a62e8a06499163ad4a26ce: Status 404 returned error can't find the container with id e719e61e3059397f328eaa3f003565b92388b98011a62e8a06499163ad4a26ce Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.426306 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.534515 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.534954 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69d5d86869-mjnsr" event={"ID":"524f6892-467d-4f42-b81a-5b4dbae841fd","Type":"ContainerDied","Data":"52e8d4a9e9273f27469b82ae3a48125d8094d3129b7a1006e87407b4fbffeb25"} Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.535015 4779 scope.go:117] "RemoveContainer" containerID="07c9098f3f6776a18f50dd52039b63faa87c142be08f10e4c4a1975e57337379" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.536239 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerStarted","Data":"57d94e80b743a707414e4e07a75c1ca84c2998e24c168ff843f3ec6827c4fe90"} Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.547840 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-dns-svc\") pod \"524f6892-467d-4f42-b81a-5b4dbae841fd\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.547945 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-config\") pod \"524f6892-467d-4f42-b81a-5b4dbae841fd\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.547992 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-nb\") pod \"524f6892-467d-4f42-b81a-5b4dbae841fd\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.548059 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmbnh\" (UniqueName: \"kubernetes.io/projected/524f6892-467d-4f42-b81a-5b4dbae841fd-kube-api-access-pmbnh\") pod \"524f6892-467d-4f42-b81a-5b4dbae841fd\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.548085 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-sb\") pod \"524f6892-467d-4f42-b81a-5b4dbae841fd\" (UID: \"524f6892-467d-4f42-b81a-5b4dbae841fd\") " Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.557079 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7rjgf" event={"ID":"7ce07226-a77d-46d9-a099-04375136d8fc","Type":"ContainerStarted","Data":"e719e61e3059397f328eaa3f003565b92388b98011a62e8a06499163ad4a26ce"} Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.565590 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/524f6892-467d-4f42-b81a-5b4dbae841fd-kube-api-access-pmbnh" (OuterVolumeSpecName: "kube-api-access-pmbnh") pod "524f6892-467d-4f42-b81a-5b4dbae841fd" (UID: "524f6892-467d-4f42-b81a-5b4dbae841fd"). InnerVolumeSpecName "kube-api-access-pmbnh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.576314 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"980dd73e-ec79-4751-ad16-7e95c4212336","Type":"ContainerStarted","Data":"2c28c1666012045329a44733f36d6e6666a27ff3009ce63a9602ee2aa9d88264"} Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.582328 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e","Type":"ContainerStarted","Data":"619c418c9c1fb149cb4061885b17219a3b0b39234fc1568c3a59aea8d78a0566"} Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.610423 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.611688 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "524f6892-467d-4f42-b81a-5b4dbae841fd" (UID: "524f6892-467d-4f42-b81a-5b4dbae841fd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.623891 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "524f6892-467d-4f42-b81a-5b4dbae841fd" (UID: "524f6892-467d-4f42-b81a-5b4dbae841fd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.651223 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.651260 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.651274 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmbnh\" (UniqueName: \"kubernetes.io/projected/524f6892-467d-4f42-b81a-5b4dbae841fd-kube-api-access-pmbnh\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.707330 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "524f6892-467d-4f42-b81a-5b4dbae841fd" (UID: "524f6892-467d-4f42-b81a-5b4dbae841fd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.707554 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-config" (OuterVolumeSpecName: "config") pod "524f6892-467d-4f42-b81a-5b4dbae841fd" (UID: "524f6892-467d-4f42-b81a-5b4dbae841fd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.753399 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.753427 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524f6892-467d-4f42-b81a-5b4dbae841fd-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.773986 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3" path="/var/lib/kubelet/pods/199b0e0f-bf7e-4e39-b7a0-497f3dd1c5e3/volumes" Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.775337 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.775432 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fh8wz"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.780869 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffc8d9cff-x6d6w"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.817038 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e005-account-create-fcctv"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.834291 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.882015 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-df0d-account-create-zpd2h"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.916440 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69d5d86869-mjnsr"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.931415 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69d5d86869-mjnsr"] Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.966057 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:46:46 crc kubenswrapper[4779]: I0929 09:46:46.966101 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.596797 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"980dd73e-ec79-4751-ad16-7e95c4212336","Type":"ContainerStarted","Data":"f2a8438b8ffac394ddf09e52db5bd98307e8ebb4e9b6ff951fac9d9c11db30a2"} Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.597162 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"980dd73e-ec79-4751-ad16-7e95c4212336","Type":"ContainerStarted","Data":"49642ae0081bfac6765277a35635b1f996ecd19270eb87473540b952c15ad91e"} Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.596919 4779 
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.597210 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0"
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.597033 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api" containerID="cri-o://f2a8438b8ffac394ddf09e52db5bd98307e8ebb4e9b6ff951fac9d9c11db30a2" gracePeriod=30
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.600821 4779 generic.go:334] "Generic (PLEG): container finished" podID="e274f651-2029-48e9-9142-bb141ebdf551" containerID="655290d1944f6c2a344112d44d901a4645b3ca42747d27ba3cf78796cfc8123f" exitCode=0
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.601024 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e005-account-create-fcctv" event={"ID":"e274f651-2029-48e9-9142-bb141ebdf551","Type":"ContainerDied","Data":"655290d1944f6c2a344112d44d901a4645b3ca42747d27ba3cf78796cfc8123f"}
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.601050 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e005-account-create-fcctv" event={"ID":"e274f651-2029-48e9-9142-bb141ebdf551","Type":"ContainerStarted","Data":"dd59dc3ca2842ec55ff406b7c51d56cecb354924f456082f7bf2a704b6d9a353"}
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.602354 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fh8wz" event={"ID":"d52d2b76-d868-4f6d-ab27-20d1d8223952","Type":"ContainerStarted","Data":"88fd8af55d559fde82abd60ce34ddbdfb628cb83a21c61f982d03bb2b4ea6a16"}
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.604039 4779 generic.go:334] "Generic (PLEG): container finished" podID="5406545d-6af2-453b-bad1-c5d0d58521ff" containerID="87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085" exitCode=0
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.604090 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" event={"ID":"5406545d-6af2-453b-bad1-c5d0d58521ff","Type":"ContainerDied","Data":"87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085"}
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.604112 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" event={"ID":"5406545d-6af2-453b-bad1-c5d0d58521ff","Type":"ContainerStarted","Data":"121bfccf3e14645e45053c827def35bfe85058e4f3a8d9e48ec04d321bcef36f"}
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.608314 4779 generic.go:334] "Generic (PLEG): container finished" podID="2f74fcce-3141-4e78-a238-d10f3ff03a11" containerID="4969b95ad9ab5f47b8b94d066a3810fc308403f20352f8fcdb69eef9b6a6217d" exitCode=0
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.608389 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df0d-account-create-zpd2h" event={"ID":"2f74fcce-3141-4e78-a238-d10f3ff03a11","Type":"ContainerDied","Data":"4969b95ad9ab5f47b8b94d066a3810fc308403f20352f8fcdb69eef9b6a6217d"}
Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.608424 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df0d-account-create-zpd2h" event={"ID":"2f74fcce-3141-4e78-a238-d10f3ff03a11","Type":"ContainerStarted","Data":"c9a6c0252dc8dcfd8aa7be9e0d7e2e2444fb165b21e5acdd572394179ba30640"}
event for pod" pod="openstack/neutron-df0d-account-create-zpd2h" event={"ID":"2f74fcce-3141-4e78-a238-d10f3ff03a11","Type":"ContainerStarted","Data":"c9a6c0252dc8dcfd8aa7be9e0d7e2e2444fb165b21e5acdd572394179ba30640"} Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.613279 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.143:9322/\": EOF" Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.616358 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=4.616340078 podStartE2EDuration="4.616340078s" podCreationTimestamp="2025-09-29 09:46:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:46:47.61155231 +0000 UTC m=+1039.592876214" watchObservedRunningTime="2025-09-29 09:46:47.616340078 +0000 UTC m=+1039.597663982" Sep 29 09:46:47 crc kubenswrapper[4779]: I0929 09:46:47.628637 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerStarted","Data":"8bfe561dcb1fb50cbfca99cc51e4d2ac5adf5bf75409d47b9f9ebaa22c1ff4a0"} Sep 29 09:46:48 crc kubenswrapper[4779]: I0929 09:46:48.638063 4779 generic.go:334] "Generic (PLEG): container finished" podID="980dd73e-ec79-4751-ad16-7e95c4212336" containerID="49642ae0081bfac6765277a35635b1f996ecd19270eb87473540b952c15ad91e" exitCode=143 Sep 29 09:46:48 crc kubenswrapper[4779]: I0929 09:46:48.638129 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"980dd73e-ec79-4751-ad16-7e95c4212336","Type":"ContainerDied","Data":"49642ae0081bfac6765277a35635b1f996ecd19270eb87473540b952c15ad91e"} Sep 29 09:46:48 crc kubenswrapper[4779]: I0929 09:46:48.788368 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="524f6892-467d-4f42-b81a-5b4dbae841fd" path="/var/lib/kubelet/pods/524f6892-467d-4f42-b81a-5b4dbae841fd/volumes" Sep 29 09:46:49 crc kubenswrapper[4779]: I0929 09:46:49.299537 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.096762 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-df0d-account-create-zpd2h" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.099464 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e005-account-create-fcctv" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.228963 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6gt5\" (UniqueName: \"kubernetes.io/projected/2f74fcce-3141-4e78-a238-d10f3ff03a11-kube-api-access-g6gt5\") pod \"2f74fcce-3141-4e78-a238-d10f3ff03a11\" (UID: \"2f74fcce-3141-4e78-a238-d10f3ff03a11\") " Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.229053 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvmqc\" (UniqueName: \"kubernetes.io/projected/e274f651-2029-48e9-9142-bb141ebdf551-kube-api-access-jvmqc\") pod \"e274f651-2029-48e9-9142-bb141ebdf551\" (UID: \"e274f651-2029-48e9-9142-bb141ebdf551\") " Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.235634 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e274f651-2029-48e9-9142-bb141ebdf551-kube-api-access-jvmqc" (OuterVolumeSpecName: "kube-api-access-jvmqc") pod "e274f651-2029-48e9-9142-bb141ebdf551" (UID: "e274f651-2029-48e9-9142-bb141ebdf551"). InnerVolumeSpecName "kube-api-access-jvmqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.247682 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f74fcce-3141-4e78-a238-d10f3ff03a11-kube-api-access-g6gt5" (OuterVolumeSpecName: "kube-api-access-g6gt5") pod "2f74fcce-3141-4e78-a238-d10f3ff03a11" (UID: "2f74fcce-3141-4e78-a238-d10f3ff03a11"). InnerVolumeSpecName "kube-api-access-g6gt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.331142 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6gt5\" (UniqueName: \"kubernetes.io/projected/2f74fcce-3141-4e78-a238-d10f3ff03a11-kube-api-access-g6gt5\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.331500 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvmqc\" (UniqueName: \"kubernetes.io/projected/e274f651-2029-48e9-9142-bb141ebdf551-kube-api-access-jvmqc\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.677605 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e005-account-create-fcctv" event={"ID":"e274f651-2029-48e9-9142-bb141ebdf551","Type":"ContainerDied","Data":"dd59dc3ca2842ec55ff406b7c51d56cecb354924f456082f7bf2a704b6d9a353"} Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.677641 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd59dc3ca2842ec55ff406b7c51d56cecb354924f456082f7bf2a704b6d9a353" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.677638 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e005-account-create-fcctv" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.682597 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" event={"ID":"5406545d-6af2-453b-bad1-c5d0d58521ff","Type":"ContainerStarted","Data":"4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d"} Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.683634 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.685463 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-df0d-account-create-zpd2h" event={"ID":"2f74fcce-3141-4e78-a238-d10f3ff03a11","Type":"ContainerDied","Data":"c9a6c0252dc8dcfd8aa7be9e0d7e2e2444fb165b21e5acdd572394179ba30640"} Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.685496 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9a6c0252dc8dcfd8aa7be9e0d7e2e2444fb165b21e5acdd572394179ba30640" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.685499 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-df0d-account-create-zpd2h" Sep 29 09:46:50 crc kubenswrapper[4779]: I0929 09:46:50.705512 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" podStartSLOduration=6.705493212 podStartE2EDuration="6.705493212s" podCreationTimestamp="2025-09-29 09:46:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:46:50.705362118 +0000 UTC m=+1042.686686022" watchObservedRunningTime="2025-09-29 09:46:50.705493212 +0000 UTC m=+1042.686817116" Sep 29 09:46:51 crc kubenswrapper[4779]: I0929 09:46:51.352524 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.143:9322/\": read tcp 10.217.0.2:49586->10.217.0.143:9322: read: connection reset by peer" Sep 29 09:46:51 crc kubenswrapper[4779]: I0929 09:46:51.705340 4779 generic.go:334] "Generic (PLEG): container finished" podID="980dd73e-ec79-4751-ad16-7e95c4212336" containerID="f2a8438b8ffac394ddf09e52db5bd98307e8ebb4e9b6ff951fac9d9c11db30a2" exitCode=0 Sep 29 09:46:51 crc kubenswrapper[4779]: I0929 09:46:51.705523 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"980dd73e-ec79-4751-ad16-7e95c4212336","Type":"ContainerDied","Data":"f2a8438b8ffac394ddf09e52db5bd98307e8ebb4e9b6ff951fac9d9c11db30a2"} Sep 29 09:46:51 crc kubenswrapper[4779]: I0929 09:46:51.707862 4779 generic.go:334] "Generic (PLEG): container finished" podID="09209678-fb3b-46a8-b3a4-7c526eb785d0" containerID="3386d853ea16d45ed25d2463b56a6269729e3608892fc940164ef824e377547b" exitCode=0 Sep 29 09:46:51 crc kubenswrapper[4779]: I0929 09:46:51.708099 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7nqcd" event={"ID":"09209678-fb3b-46a8-b3a4-7c526eb785d0","Type":"ContainerDied","Data":"3386d853ea16d45ed25d2463b56a6269729e3608892fc940164ef824e377547b"} Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.444598 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.493568 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.507393 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-combined-ca-bundle\") pod \"09209678-fb3b-46a8-b3a4-7c526eb785d0\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.507474 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64hmq\" (UniqueName: \"kubernetes.io/projected/09209678-fb3b-46a8-b3a4-7c526eb785d0-kube-api-access-64hmq\") pod \"09209678-fb3b-46a8-b3a4-7c526eb785d0\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.507516 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-scripts\") pod \"09209678-fb3b-46a8-b3a4-7c526eb785d0\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.507536 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-credential-keys\") pod \"09209678-fb3b-46a8-b3a4-7c526eb785d0\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.507578 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-fernet-keys\") pod \"09209678-fb3b-46a8-b3a4-7c526eb785d0\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.507614 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-config-data\") pod \"09209678-fb3b-46a8-b3a4-7c526eb785d0\" (UID: \"09209678-fb3b-46a8-b3a4-7c526eb785d0\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.516675 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "09209678-fb3b-46a8-b3a4-7c526eb785d0" (UID: "09209678-fb3b-46a8-b3a4-7c526eb785d0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.517497 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09209678-fb3b-46a8-b3a4-7c526eb785d0-kube-api-access-64hmq" (OuterVolumeSpecName: "kube-api-access-64hmq") pod "09209678-fb3b-46a8-b3a4-7c526eb785d0" (UID: "09209678-fb3b-46a8-b3a4-7c526eb785d0"). InnerVolumeSpecName "kube-api-access-64hmq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.520372 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-scripts" (OuterVolumeSpecName: "scripts") pod "09209678-fb3b-46a8-b3a4-7c526eb785d0" (UID: "09209678-fb3b-46a8-b3a4-7c526eb785d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.520802 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "09209678-fb3b-46a8-b3a4-7c526eb785d0" (UID: "09209678-fb3b-46a8-b3a4-7c526eb785d0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.593377 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09209678-fb3b-46a8-b3a4-7c526eb785d0" (UID: "09209678-fb3b-46a8-b3a4-7c526eb785d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.599324 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-config-data" (OuterVolumeSpecName: "config-data") pod "09209678-fb3b-46a8-b3a4-7c526eb785d0" (UID: "09209678-fb3b-46a8-b3a4-7c526eb785d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.612275 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-combined-ca-bundle\") pod \"980dd73e-ec79-4751-ad16-7e95c4212336\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.612548 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/980dd73e-ec79-4751-ad16-7e95c4212336-logs\") pod \"980dd73e-ec79-4751-ad16-7e95c4212336\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.612625 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-custom-prometheus-ca\") pod \"980dd73e-ec79-4751-ad16-7e95c4212336\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.612659 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d5nj\" (UniqueName: \"kubernetes.io/projected/980dd73e-ec79-4751-ad16-7e95c4212336-kube-api-access-9d5nj\") pod \"980dd73e-ec79-4751-ad16-7e95c4212336\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.612742 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-config-data\") pod \"980dd73e-ec79-4751-ad16-7e95c4212336\" (UID: \"980dd73e-ec79-4751-ad16-7e95c4212336\") " Sep 29 
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.613296 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.613325 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64hmq\" (UniqueName: \"kubernetes.io/projected/09209678-fb3b-46a8-b3a4-7c526eb785d0-kube-api-access-64hmq\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.613339 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.613351 4779 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-credential-keys\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.613363 4779 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-fernet-keys\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.613373 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09209678-fb3b-46a8-b3a4-7c526eb785d0-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.614405 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/980dd73e-ec79-4751-ad16-7e95c4212336-logs" (OuterVolumeSpecName: "logs") pod "980dd73e-ec79-4751-ad16-7e95c4212336" (UID: "980dd73e-ec79-4751-ad16-7e95c4212336"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.617835 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/980dd73e-ec79-4751-ad16-7e95c4212336-kube-api-access-9d5nj" (OuterVolumeSpecName: "kube-api-access-9d5nj") pod "980dd73e-ec79-4751-ad16-7e95c4212336" (UID: "980dd73e-ec79-4751-ad16-7e95c4212336"). InnerVolumeSpecName "kube-api-access-9d5nj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.643065 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "980dd73e-ec79-4751-ad16-7e95c4212336" (UID: "980dd73e-ec79-4751-ad16-7e95c4212336"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.650062 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "980dd73e-ec79-4751-ad16-7e95c4212336" (UID: "980dd73e-ec79-4751-ad16-7e95c4212336"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.667682 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-config-data" (OuterVolumeSpecName: "config-data") pod "980dd73e-ec79-4751-ad16-7e95c4212336" (UID: "980dd73e-ec79-4751-ad16-7e95c4212336"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.715290 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.715323 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/980dd73e-ec79-4751-ad16-7e95c4212336-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.715333 4779 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.715343 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d5nj\" (UniqueName: \"kubernetes.io/projected/980dd73e-ec79-4751-ad16-7e95c4212336-kube-api-access-9d5nj\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.715351 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/980dd73e-ec79-4751-ad16-7e95c4212336-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.734837 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"980dd73e-ec79-4751-ad16-7e95c4212336","Type":"ContainerDied","Data":"2c28c1666012045329a44733f36d6e6666a27ff3009ce63a9602ee2aa9d88264"} Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.734884 4779 scope.go:117] "RemoveContainer" containerID="f2a8438b8ffac394ddf09e52db5bd98307e8ebb4e9b6ff951fac9d9c11db30a2" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.735052 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.745877 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e","Type":"ContainerStarted","Data":"2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462"} Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.750665 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fh8wz" event={"ID":"d52d2b76-d868-4f6d-ab27-20d1d8223952","Type":"ContainerStarted","Data":"467d51c085ba826129d6035eb1d48f077434ce2a2043285efbb674394301142e"} Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.757765 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7nqcd" event={"ID":"09209678-fb3b-46a8-b3a4-7c526eb785d0","Type":"ContainerDied","Data":"136fdde37476b5992d690a90afda9fd94452f72d63168ab59a6e1cc6e2b5649b"} Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.757818 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="136fdde37476b5992d690a90afda9fd94452f72d63168ab59a6e1cc6e2b5649b" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.757874 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7nqcd" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.767303 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerStarted","Data":"e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca"} Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.772926 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerStarted","Data":"f4db79b816cb5076e98d3f536fc9dff544415797bf8695d0d9264d3e25a07f7d"} Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.780766 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=4.05897498 podStartE2EDuration="11.780740832s" podCreationTimestamp="2025-09-29 09:46:43 +0000 UTC" firstStartedPulling="2025-09-29 09:46:46.360789769 +0000 UTC m=+1038.342113673" lastFinishedPulling="2025-09-29 09:46:54.082555621 +0000 UTC m=+1046.063879525" observedRunningTime="2025-09-29 09:46:54.766016977 +0000 UTC m=+1046.747340881" watchObservedRunningTime="2025-09-29 09:46:54.780740832 +0000 UTC m=+1046.762064736" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.783095 4779 scope.go:117] "RemoveContainer" containerID="49642ae0081bfac6765277a35635b1f996ecd19270eb87473540b952c15ad91e" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.796063 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.839146 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.881120 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:46:54 crc kubenswrapper[4779]: E0929 09:46:54.884474 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e274f651-2029-48e9-9142-bb141ebdf551" containerName="mariadb-account-create" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.884500 4779 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="e274f651-2029-48e9-9142-bb141ebdf551" containerName="mariadb-account-create" Sep 29 09:46:54 crc kubenswrapper[4779]: E0929 09:46:54.884524 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09209678-fb3b-46a8-b3a4-7c526eb785d0" containerName="keystone-bootstrap" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.884533 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="09209678-fb3b-46a8-b3a4-7c526eb785d0" containerName="keystone-bootstrap" Sep 29 09:46:54 crc kubenswrapper[4779]: E0929 09:46:54.884543 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f74fcce-3141-4e78-a238-d10f3ff03a11" containerName="mariadb-account-create" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.884549 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f74fcce-3141-4e78-a238-d10f3ff03a11" containerName="mariadb-account-create" Sep 29 09:46:54 crc kubenswrapper[4779]: E0929 09:46:54.884562 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api-log" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.884569 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api-log" Sep 29 09:46:54 crc kubenswrapper[4779]: E0929 09:46:54.884582 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.884588 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api" Sep 29 09:46:54 crc kubenswrapper[4779]: E0929 09:46:54.884604 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="524f6892-467d-4f42-b81a-5b4dbae841fd" containerName="init" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.884610 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="524f6892-467d-4f42-b81a-5b4dbae841fd" containerName="init" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.885424 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="524f6892-467d-4f42-b81a-5b4dbae841fd" containerName="init" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.885453 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="09209678-fb3b-46a8-b3a4-7c526eb785d0" containerName="keystone-bootstrap" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.887883 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f74fcce-3141-4e78-a238-d10f3ff03a11" containerName="mariadb-account-create" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.887932 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api-log" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.887945 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.887954 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e274f651-2029-48e9-9142-bb141ebdf551" containerName="mariadb-account-create" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.889743 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-fh8wz" podStartSLOduration=3.703605406 podStartE2EDuration="10.889727946s" 
podCreationTimestamp="2025-09-29 09:46:44 +0000 UTC" firstStartedPulling="2025-09-29 09:46:46.905105541 +0000 UTC m=+1038.886429445" lastFinishedPulling="2025-09-29 09:46:54.091228081 +0000 UTC m=+1046.072551985" observedRunningTime="2025-09-29 09:46:54.799725319 +0000 UTC m=+1046.781049243" watchObservedRunningTime="2025-09-29 09:46:54.889727946 +0000 UTC m=+1046.871051850" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.890523 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.893565 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.915133 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:46:54 crc kubenswrapper[4779]: I0929 09:46:54.920489 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=4.122244345 podStartE2EDuration="11.920472473s" podCreationTimestamp="2025-09-29 09:46:43 +0000 UTC" firstStartedPulling="2025-09-29 09:46:46.285036253 +0000 UTC m=+1038.266360157" lastFinishedPulling="2025-09-29 09:46:54.083264381 +0000 UTC m=+1046.064588285" observedRunningTime="2025-09-29 09:46:54.840967759 +0000 UTC m=+1046.822291673" watchObservedRunningTime="2025-09-29 09:46:54.920472473 +0000 UTC m=+1046.901796367" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.022823 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.022874 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.022934 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xxsv\" (UniqueName: \"kubernetes.io/projected/2d380bf6-3887-44a2-91b0-b34c376d1ee6-kube-api-access-5xxsv\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.023073 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-config-data\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.023129 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d380bf6-3887-44a2-91b0-b34c376d1ee6-logs\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.125232 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-config-data\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.125377 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d380bf6-3887-44a2-91b0-b34c376d1ee6-logs\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.125423 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.125456 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.125511 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xxsv\" (UniqueName: \"kubernetes.io/projected/2d380bf6-3887-44a2-91b0-b34c376d1ee6-kube-api-access-5xxsv\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.125801 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d380bf6-3887-44a2-91b0-b34c376d1ee6-logs\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.130619 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.131228 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-config-data\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.133387 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.143567 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xxsv\" (UniqueName: \"kubernetes.io/projected/2d380bf6-3887-44a2-91b0-b34c376d1ee6-kube-api-access-5xxsv\") pod \"watcher-api-0\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.215970 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.527312 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7nqcd"] Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.537961 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7nqcd"] Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.623140 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-kscbf"] Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.626669 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.629607 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n7qtr" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.629765 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.629926 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.630059 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.637996 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kscbf"] Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.744223 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-credential-keys\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.744270 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgb2d\" (UniqueName: \"kubernetes.io/projected/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-kube-api-access-tgb2d\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.744299 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-config-data\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.744326 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-combined-ca-bundle\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.744455 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-scripts\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc 
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.744648 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-fernet-keys\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.846319 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-credential-keys\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.846360 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgb2d\" (UniqueName: \"kubernetes.io/projected/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-kube-api-access-tgb2d\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.846389 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-config-data\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.846414 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-combined-ca-bundle\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.846477 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-scripts\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.846596 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-fernet-keys\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.856131 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-credential-keys\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.856504 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-fernet-keys\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.856684 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-scripts\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf"
\"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-scripts\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.857258 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-config-data\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.860214 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-combined-ca-bundle\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.868399 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgb2d\" (UniqueName: \"kubernetes.io/projected/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-kube-api-access-tgb2d\") pod \"keystone-bootstrap-kscbf\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.931589 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-9l6pm"] Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.932642 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.936006 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rszr5" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.936264 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.951335 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:46:55 crc kubenswrapper[4779]: I0929 09:46:55.981823 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9l6pm"] Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.035954 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-r5dk2"] Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.037387 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-r5dk2" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.040808 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.040845 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-wtz7n" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.041021 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.044230 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-r5dk2"] Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.051076 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pgjq\" (UniqueName: \"kubernetes.io/projected/e5102d17-8f46-47e2-8a21-d8afec790069-kube-api-access-9pgjq\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.051136 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-combined-ca-bundle\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.051165 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-db-sync-config-data\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.156352 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pgjq\" (UniqueName: \"kubernetes.io/projected/e5102d17-8f46-47e2-8a21-d8afec790069-kube-api-access-9pgjq\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.156427 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq7d5\" (UniqueName: \"kubernetes.io/projected/ced430a8-7031-48a6-a86e-e827ef13b166-kube-api-access-zq7d5\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.156465 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-combined-ca-bundle\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.156499 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-db-sync-config-data\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.156613 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-combined-ca-bundle\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.156644 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-config\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.166035 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-combined-ca-bundle\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.169392 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-db-sync-config-data\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.179506 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pgjq\" (UniqueName: \"kubernetes.io/projected/e5102d17-8f46-47e2-8a21-d8afec790069-kube-api-access-9pgjq\") pod \"barbican-db-sync-9l6pm\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " pod="openstack/barbican-db-sync-9l6pm"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.258716 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-combined-ca-bundle\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.258763 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-config\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.258833 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq7d5\" (UniqueName: \"kubernetes.io/projected/ced430a8-7031-48a6-a86e-e827ef13b166-kube-api-access-zq7d5\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.262997 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-combined-ca-bundle\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2"
Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.264267 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-config\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2"
\"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-config\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.273857 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq7d5\" (UniqueName: \"kubernetes.io/projected/ced430a8-7031-48a6-a86e-e827ef13b166-kube-api-access-zq7d5\") pod \"neutron-db-sync-r5dk2\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " pod="openstack/neutron-db-sync-r5dk2" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.295007 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.362786 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-r5dk2" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.733383 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09209678-fb3b-46a8-b3a4-7c526eb785d0" path="/var/lib/kubelet/pods/09209678-fb3b-46a8-b3a4-7c526eb785d0/volumes" Sep 29 09:46:56 crc kubenswrapper[4779]: I0929 09:46:56.734333 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" path="/var/lib/kubelet/pods/980dd73e-ec79-4751-ad16-7e95c4212336/volumes" Sep 29 09:46:59 crc kubenswrapper[4779]: I0929 09:46:59.300592 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="980dd73e-ec79-4751-ad16-7e95c4212336" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.143:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 09:46:59 crc kubenswrapper[4779]: I0929 09:46:59.333085 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Sep 29 09:46:59 crc kubenswrapper[4779]: I0929 09:46:59.584093 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:46:59 crc kubenswrapper[4779]: I0929 09:46:59.633255 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-676856544c-dqj7w"] Sep 29 09:46:59 crc kubenswrapper[4779]: I0929 09:46:59.633544 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-676856544c-dqj7w" podUID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerName="dnsmasq-dns" containerID="cri-o://d81578ca79f4ece4b43a97ad32ad5185b8172d4d56bb7e9fbf278ab74214d0b6" gracePeriod=10 Sep 29 09:46:59 crc kubenswrapper[4779]: I0929 09:46:59.833079 4779 generic.go:334] "Generic (PLEG): container finished" podID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerID="d81578ca79f4ece4b43a97ad32ad5185b8172d4d56bb7e9fbf278ab74214d0b6" exitCode=0 Sep 29 09:46:59 crc kubenswrapper[4779]: I0929 09:46:59.833156 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676856544c-dqj7w" event={"ID":"9f5b067c-e255-4746-abd6-88e8ec9d8a85","Type":"ContainerDied","Data":"d81578ca79f4ece4b43a97ad32ad5185b8172d4d56bb7e9fbf278ab74214d0b6"} Sep 29 09:46:59 crc kubenswrapper[4779]: I0929 09:46:59.974273 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-676856544c-dqj7w" podUID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.121:5353: connect: connection refused" 
Sep 29 09:47:00 crc kubenswrapper[4779]: I0929 09:47:00.842857 4779 generic.go:334] "Generic (PLEG): container finished" podID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerID="f4db79b816cb5076e98d3f536fc9dff544415797bf8695d0d9264d3e25a07f7d" exitCode=1 Sep 29 09:47:00 crc kubenswrapper[4779]: I0929 09:47:00.842894 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerDied","Data":"f4db79b816cb5076e98d3f536fc9dff544415797bf8695d0d9264d3e25a07f7d"} Sep 29 09:47:00 crc kubenswrapper[4779]: I0929 09:47:00.843566 4779 scope.go:117] "RemoveContainer" containerID="f4db79b816cb5076e98d3f536fc9dff544415797bf8695d0d9264d3e25a07f7d" Sep 29 09:47:01 crc kubenswrapper[4779]: I0929 09:47:01.853204 4779 generic.go:334] "Generic (PLEG): container finished" podID="d52d2b76-d868-4f6d-ab27-20d1d8223952" containerID="467d51c085ba826129d6035eb1d48f077434ce2a2043285efbb674394301142e" exitCode=0 Sep 29 09:47:01 crc kubenswrapper[4779]: I0929 09:47:01.853240 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fh8wz" event={"ID":"d52d2b76-d868-4f6d-ab27-20d1d8223952","Type":"ContainerDied","Data":"467d51c085ba826129d6035eb1d48f077434ce2a2043285efbb674394301142e"} Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.292271 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.292796 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.332636 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.357475 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.712720 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fh8wz" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.790807 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.836884 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-combined-ca-bundle\") pod \"d52d2b76-d868-4f6d-ab27-20d1d8223952\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.836973 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d52d2b76-d868-4f6d-ab27-20d1d8223952-logs\") pod \"d52d2b76-d868-4f6d-ab27-20d1d8223952\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.836998 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khr8g\" (UniqueName: \"kubernetes.io/projected/d52d2b76-d868-4f6d-ab27-20d1d8223952-kube-api-access-khr8g\") pod \"d52d2b76-d868-4f6d-ab27-20d1d8223952\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.837155 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-config-data\") pod \"d52d2b76-d868-4f6d-ab27-20d1d8223952\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.837172 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-scripts\") pod \"d52d2b76-d868-4f6d-ab27-20d1d8223952\" (UID: \"d52d2b76-d868-4f6d-ab27-20d1d8223952\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.839223 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d52d2b76-d868-4f6d-ab27-20d1d8223952-logs" (OuterVolumeSpecName: "logs") pod "d52d2b76-d868-4f6d-ab27-20d1d8223952" (UID: "d52d2b76-d868-4f6d-ab27-20d1d8223952"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.843223 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52d2b76-d868-4f6d-ab27-20d1d8223952-kube-api-access-khr8g" (OuterVolumeSpecName: "kube-api-access-khr8g") pod "d52d2b76-d868-4f6d-ab27-20d1d8223952" (UID: "d52d2b76-d868-4f6d-ab27-20d1d8223952"). InnerVolumeSpecName "kube-api-access-khr8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.843508 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-scripts" (OuterVolumeSpecName: "scripts") pod "d52d2b76-d868-4f6d-ab27-20d1d8223952" (UID: "d52d2b76-d868-4f6d-ab27-20d1d8223952"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.864176 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-config-data" (OuterVolumeSpecName: "config-data") pod "d52d2b76-d868-4f6d-ab27-20d1d8223952" (UID: "d52d2b76-d868-4f6d-ab27-20d1d8223952"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.866699 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d52d2b76-d868-4f6d-ab27-20d1d8223952" (UID: "d52d2b76-d868-4f6d-ab27-20d1d8223952"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.879464 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fh8wz" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.879463 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fh8wz" event={"ID":"d52d2b76-d868-4f6d-ab27-20d1d8223952","Type":"ContainerDied","Data":"88fd8af55d559fde82abd60ce34ddbdfb628cb83a21c61f982d03bb2b4ea6a16"} Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.879702 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88fd8af55d559fde82abd60ce34ddbdfb628cb83a21c61f982d03bb2b4ea6a16" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.881475 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerStarted","Data":"54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10"} Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.883423 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676856544c-dqj7w" event={"ID":"9f5b067c-e255-4746-abd6-88e8ec9d8a85","Type":"ContainerDied","Data":"4d0580a08d25f0167f381a7045060f4531a990dda726f495ea25802d3709c80d"} Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.883465 4779 scope.go:117] "RemoveContainer" containerID="d81578ca79f4ece4b43a97ad32ad5185b8172d4d56bb7e9fbf278ab74214d0b6" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.883623 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-676856544c-dqj7w" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.885870 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerStarted","Data":"dc57f62b4e8aac95d8856e4547b1ab206d4fe4ef4ceeb48680216143e21ffd0b"} Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.918579 4779 scope.go:117] "RemoveContainer" containerID="d561d8a39157be803a0cc8a06b5cd5f4f8755b1030549b63d1872bbb397f536b" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.935673 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.938822 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q655t\" (UniqueName: \"kubernetes.io/projected/9f5b067c-e255-4746-abd6-88e8ec9d8a85-kube-api-access-q655t\") pod \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.939125 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-nb\") pod \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.939165 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-config\") pod \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.939225 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-dns-svc\") pod \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.939259 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-sb\") pod \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\" (UID: \"9f5b067c-e255-4746-abd6-88e8ec9d8a85\") " Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.939812 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.939840 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.939854 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52d2b76-d868-4f6d-ab27-20d1d8223952-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.939867 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d52d2b76-d868-4f6d-ab27-20d1d8223952-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:04 crc 
kubenswrapper[4779]: I0929 09:47:04.939880 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khr8g\" (UniqueName: \"kubernetes.io/projected/d52d2b76-d868-4f6d-ab27-20d1d8223952-kube-api-access-khr8g\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.948223 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f5b067c-e255-4746-abd6-88e8ec9d8a85-kube-api-access-q655t" (OuterVolumeSpecName: "kube-api-access-q655t") pod "9f5b067c-e255-4746-abd6-88e8ec9d8a85" (UID: "9f5b067c-e255-4746-abd6-88e8ec9d8a85"). InnerVolumeSpecName "kube-api-access-q655t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:04 crc kubenswrapper[4779]: I0929 09:47:04.998069 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.001599 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f5b067c-e255-4746-abd6-88e8ec9d8a85" (UID: "9f5b067c-e255-4746-abd6-88e8ec9d8a85"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.012564 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9f5b067c-e255-4746-abd6-88e8ec9d8a85" (UID: "9f5b067c-e255-4746-abd6-88e8ec9d8a85"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.016439 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-config" (OuterVolumeSpecName: "config") pod "9f5b067c-e255-4746-abd6-88e8ec9d8a85" (UID: "9f5b067c-e255-4746-abd6-88e8ec9d8a85"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.030140 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9f5b067c-e255-4746-abd6-88e8ec9d8a85" (UID: "9f5b067c-e255-4746-abd6-88e8ec9d8a85"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.042028 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.042057 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.042068 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q655t\" (UniqueName: \"kubernetes.io/projected/9f5b067c-e255-4746-abd6-88e8ec9d8a85-kube-api-access-q655t\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.042078 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.042087 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f5b067c-e255-4746-abd6-88e8ec9d8a85-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.221438 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-676856544c-dqj7w"] Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.230152 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-676856544c-dqj7w"] Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.397565 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-r5dk2"] Sep 29 09:47:05 crc kubenswrapper[4779]: W0929 09:47:05.403838 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5102d17_8f46_47e2_8a21_d8afec790069.slice/crio-b514b4d7ae061d3d404486056b8fe919df710d1606a4e153d61e66b95726a45c WatchSource:0}: Error finding container b514b4d7ae061d3d404486056b8fe919df710d1606a4e153d61e66b95726a45c: Status 404 returned error can't find the container with id b514b4d7ae061d3d404486056b8fe919df710d1606a4e153d61e66b95726a45c Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.407988 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9l6pm"] Sep 29 09:47:05 crc kubenswrapper[4779]: W0929 09:47:05.410474 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d380bf6_3887_44a2_91b0_b34c376d1ee6.slice/crio-446d8482815e04a48cbc2d4fa47166bca5bda6cce8f66f96d48e4ed3bda9b6bf WatchSource:0}: Error finding container 446d8482815e04a48cbc2d4fa47166bca5bda6cce8f66f96d48e4ed3bda9b6bf: Status 404 returned error can't find the container with id 446d8482815e04a48cbc2d4fa47166bca5bda6cce8f66f96d48e4ed3bda9b6bf Sep 29 09:47:05 crc kubenswrapper[4779]: W0929 09:47:05.411898 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b1b89f6_8491_4d78_bfb2_7959e2f6765d.slice/crio-469b8afe3c5e5bd59a2cbbf6ff180f0b0a9fbfb56a8a57ca321d6d6c5ea02afd WatchSource:0}: Error finding container 469b8afe3c5e5bd59a2cbbf6ff180f0b0a9fbfb56a8a57ca321d6d6c5ea02afd: Status 404 returned error can't 
find the container with id 469b8afe3c5e5bd59a2cbbf6ff180f0b0a9fbfb56a8a57ca321d6d6c5ea02afd Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.416055 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.424708 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kscbf"] Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.875762 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-67b68d58cd-fsdcs"] Sep 29 09:47:05 crc kubenswrapper[4779]: E0929 09:47:05.884382 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerName="init" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.884411 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerName="init" Sep 29 09:47:05 crc kubenswrapper[4779]: E0929 09:47:05.884430 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerName="dnsmasq-dns" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.884436 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerName="dnsmasq-dns" Sep 29 09:47:05 crc kubenswrapper[4779]: E0929 09:47:05.884456 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52d2b76-d868-4f6d-ab27-20d1d8223952" containerName="placement-db-sync" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.884462 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52d2b76-d868-4f6d-ab27-20d1d8223952" containerName="placement-db-sync" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.884687 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d52d2b76-d868-4f6d-ab27-20d1d8223952" containerName="placement-db-sync" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.884714 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" containerName="dnsmasq-dns" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.885655 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.891066 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.891260 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.891313 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.891434 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.891449 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9kwx6" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.921627 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-67b68d58cd-fsdcs"] Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.928552 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kscbf" event={"ID":"1b1b89f6-8491-4d78-bfb2-7959e2f6765d","Type":"ContainerStarted","Data":"b0b48659d5759491686c674715b7bcc182aa0be052f7a3aea021da7ae6591444"} Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.928761 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kscbf" event={"ID":"1b1b89f6-8491-4d78-bfb2-7959e2f6765d","Type":"ContainerStarted","Data":"469b8afe3c5e5bd59a2cbbf6ff180f0b0a9fbfb56a8a57ca321d6d6c5ea02afd"} Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.930479 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"2d380bf6-3887-44a2-91b0-b34c376d1ee6","Type":"ContainerStarted","Data":"88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb"} Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.930511 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"2d380bf6-3887-44a2-91b0-b34c376d1ee6","Type":"ContainerStarted","Data":"446d8482815e04a48cbc2d4fa47166bca5bda6cce8f66f96d48e4ed3bda9b6bf"} Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.952219 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-r5dk2" event={"ID":"ced430a8-7031-48a6-a86e-e827ef13b166","Type":"ContainerStarted","Data":"159a355342623059b2a7d69cd30beedc566ffc4fa9f5341bd02c36a3289d3b85"} Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.952545 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-r5dk2" event={"ID":"ced430a8-7031-48a6-a86e-e827ef13b166","Type":"ContainerStarted","Data":"7a2f941ac4de747a8d27d75ab61fc9e0994b835f91a0cf08197945c1ab0b5383"} Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.978798 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-combined-ca-bundle\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.979333 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2qx8\" (UniqueName: 
\"kubernetes.io/projected/ee014654-7a91-4650-b7d5-76f561da5787-kube-api-access-t2qx8\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.979837 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-internal-tls-certs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.980127 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-public-tls-certs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.980266 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-config-data\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.980292 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-scripts\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.980344 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee014654-7a91-4650-b7d5-76f561da5787-logs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.987657 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7rjgf" event={"ID":"7ce07226-a77d-46d9-a099-04375136d8fc","Type":"ContainerStarted","Data":"c456b6e16ea1a22b8814a3890ae26521782e99f0233c8e5adf80c3adf9c537a2"} Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.993830 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-kscbf" podStartSLOduration=10.993812868 podStartE2EDuration="10.993812868s" podCreationTimestamp="2025-09-29 09:46:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:05.98799499 +0000 UTC m=+1057.969318894" watchObservedRunningTime="2025-09-29 09:47:05.993812868 +0000 UTC m=+1057.975136772" Sep 29 09:47:05 crc kubenswrapper[4779]: I0929 09:47:05.998862 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9l6pm" event={"ID":"e5102d17-8f46-47e2-8a21-d8afec790069","Type":"ContainerStarted","Data":"b514b4d7ae061d3d404486056b8fe919df710d1606a4e153d61e66b95726a45c"} Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.043371 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-r5dk2" 
podStartSLOduration=10.043352137 podStartE2EDuration="10.043352137s" podCreationTimestamp="2025-09-29 09:46:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:06.038130726 +0000 UTC m=+1058.019454650" watchObservedRunningTime="2025-09-29 09:47:06.043352137 +0000 UTC m=+1058.024676041" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.075813 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-7rjgf" podStartSLOduration=4.901888266 podStartE2EDuration="23.075793443s" podCreationTimestamp="2025-09-29 09:46:43 +0000 UTC" firstStartedPulling="2025-09-29 09:46:46.417860935 +0000 UTC m=+1038.399184839" lastFinishedPulling="2025-09-29 09:47:04.591766112 +0000 UTC m=+1056.573090016" observedRunningTime="2025-09-29 09:47:06.064857967 +0000 UTC m=+1058.046181871" watchObservedRunningTime="2025-09-29 09:47:06.075793443 +0000 UTC m=+1058.057117347" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.082310 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2qx8\" (UniqueName: \"kubernetes.io/projected/ee014654-7a91-4650-b7d5-76f561da5787-kube-api-access-t2qx8\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.082346 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-internal-tls-certs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.082425 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-public-tls-certs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.082484 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-config-data\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.082533 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-scripts\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.082583 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee014654-7a91-4650-b7d5-76f561da5787-logs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.082678 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-combined-ca-bundle\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.087217 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee014654-7a91-4650-b7d5-76f561da5787-logs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.088054 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-internal-tls-certs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.089493 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-public-tls-certs\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.092192 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-scripts\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.093026 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-config-data\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.102422 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee014654-7a91-4650-b7d5-76f561da5787-combined-ca-bundle\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.105427 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2qx8\" (UniqueName: \"kubernetes.io/projected/ee014654-7a91-4650-b7d5-76f561da5787-kube-api-access-t2qx8\") pod \"placement-67b68d58cd-fsdcs\" (UID: \"ee014654-7a91-4650-b7d5-76f561da5787\") " pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.212178 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.731493 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f5b067c-e255-4746-abd6-88e8ec9d8a85" path="/var/lib/kubelet/pods/9f5b067c-e255-4746-abd6-88e8ec9d8a85/volumes" Sep 29 09:47:06 crc kubenswrapper[4779]: I0929 09:47:06.754542 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-67b68d58cd-fsdcs"] Sep 29 09:47:07 crc kubenswrapper[4779]: I0929 09:47:07.016587 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"2d380bf6-3887-44a2-91b0-b34c376d1ee6","Type":"ContainerStarted","Data":"945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce"} Sep 29 09:47:07 crc kubenswrapper[4779]: I0929 09:47:07.017020 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 29 09:47:07 crc kubenswrapper[4779]: I0929 09:47:07.022875 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67b68d58cd-fsdcs" event={"ID":"ee014654-7a91-4650-b7d5-76f561da5787","Type":"ContainerStarted","Data":"72b9356d68c708281c53701fd47dddb39a9bd03a71a94141510934b279437b48"} Sep 29 09:47:07 crc kubenswrapper[4779]: I0929 09:47:07.023315 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-applier-0" podUID="cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" containerName="watcher-applier" containerID="cri-o://2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462" gracePeriod=30 Sep 29 09:47:07 crc kubenswrapper[4779]: I0929 09:47:07.038850 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=13.038829444 podStartE2EDuration="13.038829444s" podCreationTimestamp="2025-09-29 09:46:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:07.033878001 +0000 UTC m=+1059.015201925" watchObservedRunningTime="2025-09-29 09:47:07.038829444 +0000 UTC m=+1059.020153348" Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.034554 4779 generic.go:334] "Generic (PLEG): container finished" podID="cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" containerID="2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462" exitCode=0 Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.034636 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e","Type":"ContainerDied","Data":"2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462"} Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.038380 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67b68d58cd-fsdcs" event={"ID":"ee014654-7a91-4650-b7d5-76f561da5787","Type":"ContainerStarted","Data":"c5d535b63d04665d4b225bc9eeb97960b2753e4fce5c55371e363c16d4dc6f5b"} Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.038434 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67b68d58cd-fsdcs" event={"ID":"ee014654-7a91-4650-b7d5-76f561da5787","Type":"ContainerStarted","Data":"0fa0fe42c81744095ed11da994285edfac36949095440dab76ed0a243ae76dd5"} Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.038508 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:08 crc 
kubenswrapper[4779]: I0929 09:47:08.041391 4779 generic.go:334] "Generic (PLEG): container finished" podID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerID="dc57f62b4e8aac95d8856e4547b1ab206d4fe4ef4ceeb48680216143e21ffd0b" exitCode=1 Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.041478 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerDied","Data":"dc57f62b4e8aac95d8856e4547b1ab206d4fe4ef4ceeb48680216143e21ffd0b"} Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.041546 4779 scope.go:117] "RemoveContainer" containerID="f4db79b816cb5076e98d3f536fc9dff544415797bf8695d0d9264d3e25a07f7d" Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.042488 4779 scope.go:117] "RemoveContainer" containerID="dc57f62b4e8aac95d8856e4547b1ab206d4fe4ef4ceeb48680216143e21ffd0b" Sep 29 09:47:08 crc kubenswrapper[4779]: E0929 09:47:08.042959 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:47:08 crc kubenswrapper[4779]: I0929 09:47:08.060761 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-67b68d58cd-fsdcs" podStartSLOduration=3.060739663 podStartE2EDuration="3.060739663s" podCreationTimestamp="2025-09-29 09:47:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:08.058040645 +0000 UTC m=+1060.039364549" watchObservedRunningTime="2025-09-29 09:47:08.060739663 +0000 UTC m=+1060.042063567" Sep 29 09:47:09 crc kubenswrapper[4779]: I0929 09:47:09.055178 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:09 crc kubenswrapper[4779]: E0929 09:47:09.333054 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462 is running failed: container process not found" containerID="2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Sep 29 09:47:09 crc kubenswrapper[4779]: E0929 09:47:09.333389 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462 is running failed: container process not found" containerID="2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Sep 29 09:47:09 crc kubenswrapper[4779]: E0929 09:47:09.333704 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462 is running failed: container process not found" containerID="2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Sep 29 09:47:09 crc kubenswrapper[4779]: E0929 09:47:09.333729 4779 
prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462 is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" containerName="watcher-applier" Sep 29 09:47:09 crc kubenswrapper[4779]: I0929 09:47:09.347055 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.194589 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.216646 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.304103 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-config-data\") pod \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.304249 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2vvk\" (UniqueName: \"kubernetes.io/projected/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-kube-api-access-w2vvk\") pod \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.304307 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-combined-ca-bundle\") pod \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.304865 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-logs\") pod \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\" (UID: \"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e\") " Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.305854 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-logs" (OuterVolumeSpecName: "logs") pod "cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" (UID: "cdba510b-3621-4b0b-bdb1-ba1c455bcd4e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.327358 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-kube-api-access-w2vvk" (OuterVolumeSpecName: "kube-api-access-w2vvk") pod "cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" (UID: "cdba510b-3621-4b0b-bdb1-ba1c455bcd4e"). InnerVolumeSpecName "kube-api-access-w2vvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.331835 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" (UID: "cdba510b-3621-4b0b-bdb1-ba1c455bcd4e"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.367895 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-config-data" (OuterVolumeSpecName: "config-data") pod "cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" (UID: "cdba510b-3621-4b0b-bdb1-ba1c455bcd4e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.406583 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.406619 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.406629 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2vvk\" (UniqueName: \"kubernetes.io/projected/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-kube-api-access-w2vvk\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:10 crc kubenswrapper[4779]: I0929 09:47:10.406638 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.072429 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"cdba510b-3621-4b0b-bdb1-ba1c455bcd4e","Type":"ContainerDied","Data":"619c418c9c1fb149cb4061885b17219a3b0b39234fc1568c3a59aea8d78a0566"} Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.072631 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.078842 4779 generic.go:334] "Generic (PLEG): container finished" podID="1b1b89f6-8491-4d78-bfb2-7959e2f6765d" containerID="b0b48659d5759491686c674715b7bcc182aa0be052f7a3aea021da7ae6591444" exitCode=0 Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.078894 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kscbf" event={"ID":"1b1b89f6-8491-4d78-bfb2-7959e2f6765d","Type":"ContainerDied","Data":"b0b48659d5759491686c674715b7bcc182aa0be052f7a3aea021da7ae6591444"} Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.111008 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.127886 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.137084 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:47:11 crc kubenswrapper[4779]: E0929 09:47:11.137528 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" containerName="watcher-applier" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.137547 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" containerName="watcher-applier" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.137709 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" containerName="watcher-applier" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.138507 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.143383 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.150680 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.226321 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af9be7eb-53f5-4de6-a341-4ab3d8168181-config-data\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.226389 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mfwk\" (UniqueName: \"kubernetes.io/projected/af9be7eb-53f5-4de6-a341-4ab3d8168181-kube-api-access-4mfwk\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.226460 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af9be7eb-53f5-4de6-a341-4ab3d8168181-logs\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.226685 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af9be7eb-53f5-4de6-a341-4ab3d8168181-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.328881 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af9be7eb-53f5-4de6-a341-4ab3d8168181-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.329017 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af9be7eb-53f5-4de6-a341-4ab3d8168181-config-data\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.329059 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mfwk\" (UniqueName: \"kubernetes.io/projected/af9be7eb-53f5-4de6-a341-4ab3d8168181-kube-api-access-4mfwk\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.329112 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af9be7eb-53f5-4de6-a341-4ab3d8168181-logs\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.329560 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/af9be7eb-53f5-4de6-a341-4ab3d8168181-logs\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.342508 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af9be7eb-53f5-4de6-a341-4ab3d8168181-config-data\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.346187 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mfwk\" (UniqueName: \"kubernetes.io/projected/af9be7eb-53f5-4de6-a341-4ab3d8168181-kube-api-access-4mfwk\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.346707 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af9be7eb-53f5-4de6-a341-4ab3d8168181-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"af9be7eb-53f5-4de6-a341-4ab3d8168181\") " pod="openstack/watcher-applier-0" Sep 29 09:47:11 crc kubenswrapper[4779]: I0929 09:47:11.462671 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.384005 4779 scope.go:117] "RemoveContainer" containerID="2dcdbb1a94c47be2591ea4abf54cee2a3c8cc747adfe577f64f879ed52599462" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.685540 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.728587 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdba510b-3621-4b0b-bdb1-ba1c455bcd4e" path="/var/lib/kubelet/pods/cdba510b-3621-4b0b-bdb1-ba1c455bcd4e/volumes" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.753488 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-fernet-keys\") pod \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.753629 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgb2d\" (UniqueName: \"kubernetes.io/projected/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-kube-api-access-tgb2d\") pod \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.753695 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-credential-keys\") pod \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.753721 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-scripts\") pod \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.753839 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-combined-ca-bundle\") pod \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.755704 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-config-data\") pod \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\" (UID: \"1b1b89f6-8491-4d78-bfb2-7959e2f6765d\") " Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.763260 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-scripts" (OuterVolumeSpecName: "scripts") pod "1b1b89f6-8491-4d78-bfb2-7959e2f6765d" (UID: "1b1b89f6-8491-4d78-bfb2-7959e2f6765d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.763264 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-kube-api-access-tgb2d" (OuterVolumeSpecName: "kube-api-access-tgb2d") pod "1b1b89f6-8491-4d78-bfb2-7959e2f6765d" (UID: "1b1b89f6-8491-4d78-bfb2-7959e2f6765d"). InnerVolumeSpecName "kube-api-access-tgb2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.763627 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1b1b89f6-8491-4d78-bfb2-7959e2f6765d" (UID: "1b1b89f6-8491-4d78-bfb2-7959e2f6765d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.768457 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1b1b89f6-8491-4d78-bfb2-7959e2f6765d" (UID: "1b1b89f6-8491-4d78-bfb2-7959e2f6765d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.800448 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-config-data" (OuterVolumeSpecName: "config-data") pod "1b1b89f6-8491-4d78-bfb2-7959e2f6765d" (UID: "1b1b89f6-8491-4d78-bfb2-7959e2f6765d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.804961 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b1b89f6-8491-4d78-bfb2-7959e2f6765d" (UID: "1b1b89f6-8491-4d78-bfb2-7959e2f6765d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.859972 4779 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.860022 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.860039 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.860055 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.860070 4779 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.860084 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgb2d\" (UniqueName: \"kubernetes.io/projected/1b1b89f6-8491-4d78-bfb2-7959e2f6765d-kube-api-access-tgb2d\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:12 crc kubenswrapper[4779]: I0929 09:47:12.883676 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Sep 29 09:47:12 crc kubenswrapper[4779]: W0929 09:47:12.886652 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf9be7eb_53f5_4de6_a341_4ab3d8168181.slice/crio-40f736888c997300219bf7cab4d945611720fe1f1ee6a63bed179d214eabe5d0 WatchSource:0}: Error finding container 40f736888c997300219bf7cab4d945611720fe1f1ee6a63bed179d214eabe5d0: Status 404 returned error can't find the container with id 40f736888c997300219bf7cab4d945611720fe1f1ee6a63bed179d214eabe5d0 Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.096572 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kscbf" event={"ID":"1b1b89f6-8491-4d78-bfb2-7959e2f6765d","Type":"ContainerDied","Data":"469b8afe3c5e5bd59a2cbbf6ff180f0b0a9fbfb56a8a57ca321d6d6c5ea02afd"} Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.097086 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="469b8afe3c5e5bd59a2cbbf6ff180f0b0a9fbfb56a8a57ca321d6d6c5ea02afd" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.096887 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-kscbf" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.099531 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerStarted","Data":"f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d"} Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.103057 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"af9be7eb-53f5-4de6-a341-4ab3d8168181","Type":"ContainerStarted","Data":"1a9ebe5287eb3737f4de18687f6d4c0a6487338f34d575501044cd1200441e78"} Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.103100 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"af9be7eb-53f5-4de6-a341-4ab3d8168181","Type":"ContainerStarted","Data":"40f736888c997300219bf7cab4d945611720fe1f1ee6a63bed179d214eabe5d0"} Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.105054 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9l6pm" event={"ID":"e5102d17-8f46-47e2-8a21-d8afec790069","Type":"ContainerStarted","Data":"37359daf67a0c5c01f804fcfbb1eb5f2a163e6762852fc72e7ed7179c701ffaa"} Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.130609 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=2.130590094 podStartE2EDuration="2.130590094s" podCreationTimestamp="2025-09-29 09:47:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:13.123567352 +0000 UTC m=+1065.104891256" watchObservedRunningTime="2025-09-29 09:47:13.130590094 +0000 UTC m=+1065.111914008" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.159722 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-9l6pm" podStartSLOduration=11.111552024 podStartE2EDuration="18.159699754s" podCreationTimestamp="2025-09-29 09:46:55 +0000 UTC" firstStartedPulling="2025-09-29 09:47:05.410174511 +0000 UTC m=+1057.391498415" lastFinishedPulling="2025-09-29 09:47:12.458322221 +0000 UTC m=+1064.439646145" observedRunningTime="2025-09-29 09:47:13.149187491 +0000 UTC m=+1065.130511395" watchObservedRunningTime="2025-09-29 09:47:13.159699754 +0000 UTC m=+1065.141023658" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.215452 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-864d875667-zb59s"] Sep 29 09:47:13 crc kubenswrapper[4779]: E0929 09:47:13.215936 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b1b89f6-8491-4d78-bfb2-7959e2f6765d" containerName="keystone-bootstrap" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.215959 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b1b89f6-8491-4d78-bfb2-7959e2f6765d" containerName="keystone-bootstrap" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.216194 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b1b89f6-8491-4d78-bfb2-7959e2f6765d" containerName="keystone-bootstrap" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.217031 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.218622 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-n7qtr" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.219077 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.219251 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.219483 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.219654 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.221386 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.223225 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-864d875667-zb59s"] Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.269942 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-config-data\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.270017 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-scripts\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.270087 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-credential-keys\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.270206 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-internal-tls-certs\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.270378 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-combined-ca-bundle\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.270416 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsjmt\" (UniqueName: \"kubernetes.io/projected/ec529845-7c69-4888-9bb2-d9cd15db73ed-kube-api-access-fsjmt\") pod \"keystone-864d875667-zb59s\" (UID: 
\"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.270459 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-public-tls-certs\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.270532 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-fernet-keys\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.372102 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-fernet-keys\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.372239 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-config-data\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.372268 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-scripts\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.372306 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-credential-keys\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.372361 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-internal-tls-certs\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.372473 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-combined-ca-bundle\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.372506 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsjmt\" (UniqueName: \"kubernetes.io/projected/ec529845-7c69-4888-9bb2-d9cd15db73ed-kube-api-access-fsjmt\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 
29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.372527 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-public-tls-certs\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.377354 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-credential-keys\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.378333 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-public-tls-certs\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.379777 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-config-data\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.380460 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-combined-ca-bundle\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.380479 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-internal-tls-certs\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.383242 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-scripts\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.386666 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec529845-7c69-4888-9bb2-d9cd15db73ed-fernet-keys\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.393932 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsjmt\" (UniqueName: \"kubernetes.io/projected/ec529845-7c69-4888-9bb2-d9cd15db73ed-kube-api-access-fsjmt\") pod \"keystone-864d875667-zb59s\" (UID: \"ec529845-7c69-4888-9bb2-d9cd15db73ed\") " pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:13 crc kubenswrapper[4779]: I0929 09:47:13.559855 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.053461 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-864d875667-zb59s"] Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.121116 4779 generic.go:334] "Generic (PLEG): container finished" podID="7ce07226-a77d-46d9-a099-04375136d8fc" containerID="c456b6e16ea1a22b8814a3890ae26521782e99f0233c8e5adf80c3adf9c537a2" exitCode=0 Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.121177 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7rjgf" event={"ID":"7ce07226-a77d-46d9-a099-04375136d8fc","Type":"ContainerDied","Data":"c456b6e16ea1a22b8814a3890ae26521782e99f0233c8e5adf80c3adf9c537a2"} Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.126060 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-864d875667-zb59s" event={"ID":"ec529845-7c69-4888-9bb2-d9cd15db73ed","Type":"ContainerStarted","Data":"80adadfa2ab5d8815f3fa77445bb0c23e9c1931f6d91d0a3dc4c57395b521a63"} Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.294780 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.295306 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.295384 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.295402 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:14 crc kubenswrapper[4779]: I0929 09:47:14.298126 4779 scope.go:117] "RemoveContainer" containerID="dc57f62b4e8aac95d8856e4547b1ab206d4fe4ef4ceeb48680216143e21ffd0b" Sep 29 09:47:14 crc kubenswrapper[4779]: E0929 09:47:14.298387 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.135986 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-864d875667-zb59s" event={"ID":"ec529845-7c69-4888-9bb2-d9cd15db73ed","Type":"ContainerStarted","Data":"4ebfca4688d28696d7af459063e070190ddced6c6da7af783516ef3932158756"} Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.136515 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.138137 4779 scope.go:117] "RemoveContainer" containerID="dc57f62b4e8aac95d8856e4547b1ab206d4fe4ef4ceeb48680216143e21ffd0b" Sep 29 09:47:15 crc kubenswrapper[4779]: E0929 09:47:15.138417 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 
29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.171020 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-864d875667-zb59s" podStartSLOduration=2.170999034 podStartE2EDuration="2.170999034s" podCreationTimestamp="2025-09-29 09:47:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:15.162365825 +0000 UTC m=+1067.143689739" watchObservedRunningTime="2025-09-29 09:47:15.170999034 +0000 UTC m=+1067.152322938" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.218073 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.223395 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.484970 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.517682 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-config-data\") pod \"7ce07226-a77d-46d9-a099-04375136d8fc\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.518044 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-db-sync-config-data\") pod \"7ce07226-a77d-46d9-a099-04375136d8fc\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.518181 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-scripts\") pod \"7ce07226-a77d-46d9-a099-04375136d8fc\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.518221 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7ce07226-a77d-46d9-a099-04375136d8fc-etc-machine-id\") pod \"7ce07226-a77d-46d9-a099-04375136d8fc\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.518273 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-combined-ca-bundle\") pod \"7ce07226-a77d-46d9-a099-04375136d8fc\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.518306 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr9vh\" (UniqueName: \"kubernetes.io/projected/7ce07226-a77d-46d9-a099-04375136d8fc-kube-api-access-cr9vh\") pod \"7ce07226-a77d-46d9-a099-04375136d8fc\" (UID: \"7ce07226-a77d-46d9-a099-04375136d8fc\") " Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.519632 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7ce07226-a77d-46d9-a099-04375136d8fc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7ce07226-a77d-46d9-a099-04375136d8fc" (UID: "7ce07226-a77d-46d9-a099-04375136d8fc"). 
InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.524398 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7ce07226-a77d-46d9-a099-04375136d8fc" (UID: "7ce07226-a77d-46d9-a099-04375136d8fc"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.524581 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ce07226-a77d-46d9-a099-04375136d8fc-kube-api-access-cr9vh" (OuterVolumeSpecName: "kube-api-access-cr9vh") pod "7ce07226-a77d-46d9-a099-04375136d8fc" (UID: "7ce07226-a77d-46d9-a099-04375136d8fc"). InnerVolumeSpecName "kube-api-access-cr9vh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.538354 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-scripts" (OuterVolumeSpecName: "scripts") pod "7ce07226-a77d-46d9-a099-04375136d8fc" (UID: "7ce07226-a77d-46d9-a099-04375136d8fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.556185 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ce07226-a77d-46d9-a099-04375136d8fc" (UID: "7ce07226-a77d-46d9-a099-04375136d8fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.583532 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-config-data" (OuterVolumeSpecName: "config-data") pod "7ce07226-a77d-46d9-a099-04375136d8fc" (UID: "7ce07226-a77d-46d9-a099-04375136d8fc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.620603 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.620634 4779 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.620649 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.620657 4779 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7ce07226-a77d-46d9-a099-04375136d8fc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.620666 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce07226-a77d-46d9-a099-04375136d8fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:15 crc kubenswrapper[4779]: I0929 09:47:15.620675 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr9vh\" (UniqueName: \"kubernetes.io/projected/7ce07226-a77d-46d9-a099-04375136d8fc-kube-api-access-cr9vh\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.145817 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7rjgf" event={"ID":"7ce07226-a77d-46d9-a099-04375136d8fc","Type":"ContainerDied","Data":"e719e61e3059397f328eaa3f003565b92388b98011a62e8a06499163ad4a26ce"} Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.145868 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e719e61e3059397f328eaa3f003565b92388b98011a62e8a06499163ad4a26ce" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.146090 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7rjgf" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.150936 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.390187 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:16 crc kubenswrapper[4779]: E0929 09:47:16.391031 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce07226-a77d-46d9-a099-04375136d8fc" containerName="cinder-db-sync" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.391052 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce07226-a77d-46d9-a099-04375136d8fc" containerName="cinder-db-sync" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.391234 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ce07226-a77d-46d9-a099-04375136d8fc" containerName="cinder-db-sync" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.392184 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.401888 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-fts6t" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.404555 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.404842 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.405351 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.425063 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.435009 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.435055 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-scripts\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.435073 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bbf17a9c-86ea-4b10-a73f-81758085521e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.435087 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.435110 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.435183 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjvm8\" (UniqueName: \"kubernetes.io/projected/bbf17a9c-86ea-4b10-a73f-81758085521e-kube-api-access-cjvm8\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.465038 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.486937 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c4564557-zhwlz"] Sep 29 09:47:16 crc 
kubenswrapper[4779]: I0929 09:47:16.489424 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.538938 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjvm8\" (UniqueName: \"kubernetes.io/projected/bbf17a9c-86ea-4b10-a73f-81758085521e-kube-api-access-cjvm8\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.538990 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-dns-svc\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539030 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539100 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz88t\" (UniqueName: \"kubernetes.io/projected/b699141c-f160-495b-9954-891be088873e-kube-api-access-qz88t\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539118 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-config\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539245 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539267 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539290 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-scripts\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539310 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bbf17a9c-86ea-4b10-a73f-81758085521e-etc-machine-id\") pod 
\"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539328 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.539353 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.543118 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bbf17a9c-86ea-4b10-a73f-81758085521e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.549954 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.555165 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c4564557-zhwlz"] Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.557818 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.561034 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.561328 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjvm8\" (UniqueName: \"kubernetes.io/projected/bbf17a9c-86ea-4b10-a73f-81758085521e-kube-api-access-cjvm8\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.576440 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-scripts\") pod \"cinder-scheduler-0\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.617264 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.618796 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.624465 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.628942 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644086 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd1b88bc-6a49-42c8-8a07-9887030daf75-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644139 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffjjf\" (UniqueName: \"kubernetes.io/projected/bd1b88bc-6a49-42c8-8a07-9887030daf75-kube-api-access-ffjjf\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644158 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644225 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-dns-svc\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644249 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644397 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz88t\" (UniqueName: \"kubernetes.io/projected/b699141c-f160-495b-9954-891be088873e-kube-api-access-qz88t\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644424 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-config\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644463 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-scripts\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644533 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644566 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data-custom\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644657 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd1b88bc-6a49-42c8-8a07-9887030daf75-logs\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.644694 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.645268 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-config\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.645287 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.645390 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-dns-svc\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.645448 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.660130 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz88t\" (UniqueName: \"kubernetes.io/projected/b699141c-f160-495b-9954-891be088873e-kube-api-access-qz88t\") pod \"dnsmasq-dns-5c4564557-zhwlz\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.712473 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.747214 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd1b88bc-6a49-42c8-8a07-9887030daf75-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.747490 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffjjf\" (UniqueName: \"kubernetes.io/projected/bd1b88bc-6a49-42c8-8a07-9887030daf75-kube-api-access-ffjjf\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.747508 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.747604 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-scripts\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.747653 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.747673 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data-custom\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.747697 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd1b88bc-6a49-42c8-8a07-9887030daf75-logs\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.747352 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd1b88bc-6a49-42c8-8a07-9887030daf75-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.748647 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd1b88bc-6a49-42c8-8a07-9887030daf75-logs\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.753858 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-scripts\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 
09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.754736 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.760503 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.769008 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffjjf\" (UniqueName: \"kubernetes.io/projected/bd1b88bc-6a49-42c8-8a07-9887030daf75-kube-api-access-ffjjf\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.775034 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data-custom\") pod \"cinder-api-0\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.845124 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.959512 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.967113 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.967177 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.967226 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.967981 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fcd55a765537b74d7fe03acdaa880fd723f800adde7aab67d7d2e84cbd82c102"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 09:47:16 crc kubenswrapper[4779]: I0929 09:47:16.968060 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://fcd55a765537b74d7fe03acdaa880fd723f800adde7aab67d7d2e84cbd82c102" gracePeriod=600 Sep 29 09:47:17 
crc kubenswrapper[4779]: I0929 09:47:17.157327 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="fcd55a765537b74d7fe03acdaa880fd723f800adde7aab67d7d2e84cbd82c102" exitCode=0 Sep 29 09:47:17 crc kubenswrapper[4779]: I0929 09:47:17.157364 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"fcd55a765537b74d7fe03acdaa880fd723f800adde7aab67d7d2e84cbd82c102"} Sep 29 09:47:17 crc kubenswrapper[4779]: I0929 09:47:17.157717 4779 scope.go:117] "RemoveContainer" containerID="0e0bb92440d884ad90defff16322d948e1df51b9f7349061e9da58a7e515a610" Sep 29 09:47:17 crc kubenswrapper[4779]: I0929 09:47:17.211460 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:17 crc kubenswrapper[4779]: W0929 09:47:17.215610 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbf17a9c_86ea_4b10_a73f_81758085521e.slice/crio-e1f467a122e19cf112529cb725323f776e41700237dae8c5c7f7f38543dbfb1b WatchSource:0}: Error finding container e1f467a122e19cf112529cb725323f776e41700237dae8c5c7f7f38543dbfb1b: Status 404 returned error can't find the container with id e1f467a122e19cf112529cb725323f776e41700237dae8c5c7f7f38543dbfb1b Sep 29 09:47:17 crc kubenswrapper[4779]: I0929 09:47:17.306625 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c4564557-zhwlz"] Sep 29 09:47:17 crc kubenswrapper[4779]: I0929 09:47:17.464856 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:17 crc kubenswrapper[4779]: W0929 09:47:17.470425 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd1b88bc_6a49_42c8_8a07_9887030daf75.slice/crio-68776508c67b4c0464a13daee0cd3f7e79b859773e62b900460c638d1d8d2561 WatchSource:0}: Error finding container 68776508c67b4c0464a13daee0cd3f7e79b859773e62b900460c638d1d8d2561: Status 404 returned error can't find the container with id 68776508c67b4c0464a13daee0cd3f7e79b859773e62b900460c638d1d8d2561 Sep 29 09:47:18 crc kubenswrapper[4779]: I0929 09:47:18.179062 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bbf17a9c-86ea-4b10-a73f-81758085521e","Type":"ContainerStarted","Data":"e1f467a122e19cf112529cb725323f776e41700237dae8c5c7f7f38543dbfb1b"} Sep 29 09:47:18 crc kubenswrapper[4779]: I0929 09:47:18.182198 4779 generic.go:334] "Generic (PLEG): container finished" podID="b699141c-f160-495b-9954-891be088873e" containerID="307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f" exitCode=0 Sep 29 09:47:18 crc kubenswrapper[4779]: I0929 09:47:18.182248 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" event={"ID":"b699141c-f160-495b-9954-891be088873e","Type":"ContainerDied","Data":"307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f"} Sep 29 09:47:18 crc kubenswrapper[4779]: I0929 09:47:18.182311 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" event={"ID":"b699141c-f160-495b-9954-891be088873e","Type":"ContainerStarted","Data":"022d8a67c1a13b586e8c1f7a7a97b1fa12d7bf0be5d58730104a6d8c48d43d7f"} Sep 29 09:47:18 crc kubenswrapper[4779]: I0929 09:47:18.183926 4779 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bd1b88bc-6a49-42c8-8a07-9887030daf75","Type":"ContainerStarted","Data":"6d416c201fc6350bc36a311c5718e7aa7a01c858bafd3e6cacaf6b4ab23a663a"} Sep 29 09:47:18 crc kubenswrapper[4779]: I0929 09:47:18.183974 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bd1b88bc-6a49-42c8-8a07-9887030daf75","Type":"ContainerStarted","Data":"68776508c67b4c0464a13daee0cd3f7e79b859773e62b900460c638d1d8d2561"} Sep 29 09:47:18 crc kubenswrapper[4779]: I0929 09:47:18.585206 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:19 crc kubenswrapper[4779]: I0929 09:47:19.193773 4779 generic.go:334] "Generic (PLEG): container finished" podID="e5102d17-8f46-47e2-8a21-d8afec790069" containerID="37359daf67a0c5c01f804fcfbb1eb5f2a163e6762852fc72e7ed7179c701ffaa" exitCode=0 Sep 29 09:47:19 crc kubenswrapper[4779]: I0929 09:47:19.193799 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9l6pm" event={"ID":"e5102d17-8f46-47e2-8a21-d8afec790069","Type":"ContainerDied","Data":"37359daf67a0c5c01f804fcfbb1eb5f2a163e6762852fc72e7ed7179c701ffaa"} Sep 29 09:47:21 crc kubenswrapper[4779]: I0929 09:47:21.463705 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Sep 29 09:47:21 crc kubenswrapper[4779]: I0929 09:47:21.488242 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Sep 29 09:47:22 crc kubenswrapper[4779]: I0929 09:47:22.271836 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.005569 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.006167 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerName="watcher-api-log" containerID="cri-o://88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb" gracePeriod=30 Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.006681 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerName="watcher-api" containerID="cri-o://945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce" gracePeriod=30 Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.291514 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9l6pm" event={"ID":"e5102d17-8f46-47e2-8a21-d8afec790069","Type":"ContainerDied","Data":"b514b4d7ae061d3d404486056b8fe919df710d1606a4e153d61e66b95726a45c"} Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.291549 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b514b4d7ae061d3d404486056b8fe919df710d1606a4e153d61e66b95726a45c" Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.295015 4779 generic.go:334] "Generic (PLEG): container finished" podID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerID="88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb" exitCode=143 Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.295150 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" 
event={"ID":"2d380bf6-3887-44a2-91b0-b34c376d1ee6","Type":"ContainerDied","Data":"88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb"} Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.303815 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.480758 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pgjq\" (UniqueName: \"kubernetes.io/projected/e5102d17-8f46-47e2-8a21-d8afec790069-kube-api-access-9pgjq\") pod \"e5102d17-8f46-47e2-8a21-d8afec790069\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.481132 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-db-sync-config-data\") pod \"e5102d17-8f46-47e2-8a21-d8afec790069\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.481202 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-combined-ca-bundle\") pod \"e5102d17-8f46-47e2-8a21-d8afec790069\" (UID: \"e5102d17-8f46-47e2-8a21-d8afec790069\") " Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.492190 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5102d17-8f46-47e2-8a21-d8afec790069-kube-api-access-9pgjq" (OuterVolumeSpecName: "kube-api-access-9pgjq") pod "e5102d17-8f46-47e2-8a21-d8afec790069" (UID: "e5102d17-8f46-47e2-8a21-d8afec790069"). InnerVolumeSpecName "kube-api-access-9pgjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.502054 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e5102d17-8f46-47e2-8a21-d8afec790069" (UID: "e5102d17-8f46-47e2-8a21-d8afec790069"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.511142 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5102d17-8f46-47e2-8a21-d8afec790069" (UID: "e5102d17-8f46-47e2-8a21-d8afec790069"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.583252 4779 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.583287 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5102d17-8f46-47e2-8a21-d8afec790069-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:23 crc kubenswrapper[4779]: I0929 09:47:23.583297 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pgjq\" (UniqueName: \"kubernetes.io/projected/e5102d17-8f46-47e2-8a21-d8afec790069-kube-api-access-9pgjq\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.439401 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.442300 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9l6pm" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.445264 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"3c3dea31b0e7eb572818f728d7b074a8ac3d1e14ba537ebb0fed907a0fa98d28"} Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.466582 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" podStartSLOduration=8.466558466 podStartE2EDuration="8.466558466s" podCreationTimestamp="2025-09-29 09:47:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:24.456932628 +0000 UTC m=+1076.438256542" watchObservedRunningTime="2025-09-29 09:47:24.466558466 +0000 UTC m=+1076.447882370" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.709633 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6cc6cc97c7-9ffpm"] Sep 29 09:47:24 crc kubenswrapper[4779]: E0929 09:47:24.710452 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5102d17-8f46-47e2-8a21-d8afec790069" containerName="barbican-db-sync" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.710479 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5102d17-8f46-47e2-8a21-d8afec790069" containerName="barbican-db-sync" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.710694 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5102d17-8f46-47e2-8a21-d8afec790069" containerName="barbican-db-sync" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.712152 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.722632 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.722828 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.723008 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rszr5" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.838058 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5565dc4c94-dr8wr"] Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.839357 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6cc6cc97c7-9ffpm"] Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.839384 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5565dc4c94-dr8wr"] Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.839396 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c4564557-zhwlz"] Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.839531 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.848380 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.859468 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-646f8b549-z5mvs"] Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.861060 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.889036 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-646f8b549-z5mvs"] Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.905338 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-logs\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.905403 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-config-data-custom\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.905421 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr6p5\" (UniqueName: \"kubernetes.io/projected/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-kube-api-access-tr6p5\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.905443 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-combined-ca-bundle\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:24 crc kubenswrapper[4779]: I0929 09:47:24.905511 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-config-data\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.000053 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-77d4c87646-5g4ds"] Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.002009 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.005831 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007362 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-config-data\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007399 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-dns-svc\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007434 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-config-data-custom\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007456 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j49nf\" (UniqueName: \"kubernetes.io/projected/30b112ba-d459-4229-a0d5-a327cdf7d39a-kube-api-access-j49nf\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007487 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-sb\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007520 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-config-data\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007544 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdwzf\" (UniqueName: \"kubernetes.io/projected/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-kube-api-access-pdwzf\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007566 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-config\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " 
pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007597 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-logs\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007621 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-combined-ca-bundle\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007652 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-logs\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007668 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-config-data-custom\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007686 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr6p5\" (UniqueName: \"kubernetes.io/projected/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-kube-api-access-tr6p5\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007710 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-combined-ca-bundle\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.007733 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-nb\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.009502 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-logs\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.010471 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-77d4c87646-5g4ds"] Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.015845 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-config-data-custom\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.039609 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-combined-ca-bundle\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.039983 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-config-data\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.044172 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr6p5\" (UniqueName: \"kubernetes.io/projected/e81a45ce-9e33-41f4-9cf2-aa44749e66e3-kube-api-access-tr6p5\") pod \"barbican-worker-6cc6cc97c7-9ffpm\" (UID: \"e81a45ce-9e33-41f4-9cf2-aa44749e66e3\") " pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.105180 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.109764 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-sb\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.109835 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.109869 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdwzf\" (UniqueName: \"kubernetes.io/projected/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-kube-api-access-pdwzf\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.109898 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-config\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110273 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-combined-ca-bundle\") pod \"barbican-api-77d4c87646-5g4ds\" 
(UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110300 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-combined-ca-bundle\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110337 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-logs\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110363 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-logs\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110388 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-nb\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110402 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data-custom\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110427 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-config-data\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110448 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-dns-svc\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110468 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6rb8\" (UniqueName: \"kubernetes.io/projected/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-kube-api-access-m6rb8\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110490 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-config-data-custom\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.110591 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j49nf\" (UniqueName: \"kubernetes.io/projected/30b112ba-d459-4229-a0d5-a327cdf7d39a-kube-api-access-j49nf\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.111732 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-sb\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.112429 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-config\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.114355 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-logs\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.115215 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-nb\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.115874 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-combined-ca-bundle\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.116550 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-dns-svc\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.116733 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.130983 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-config-data\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.135513 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-config-data-custom\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.143133 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j49nf\" (UniqueName: \"kubernetes.io/projected/30b112ba-d459-4229-a0d5-a327cdf7d39a-kube-api-access-j49nf\") pod \"dnsmasq-dns-646f8b549-z5mvs\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.149496 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdwzf\" (UniqueName: \"kubernetes.io/projected/8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c-kube-api-access-pdwzf\") pod \"barbican-keystone-listener-5565dc4c94-dr8wr\" (UID: \"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c\") " pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.206310 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.206736 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.214419 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d380bf6-3887-44a2-91b0-b34c376d1ee6-logs\") pod \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.214510 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-combined-ca-bundle\") pod \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.214557 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xxsv\" (UniqueName: \"kubernetes.io/projected/2d380bf6-3887-44a2-91b0-b34c376d1ee6-kube-api-access-5xxsv\") pod \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.214585 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-config-data\") pod \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.214620 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-custom-prometheus-ca\") pod \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\" (UID: \"2d380bf6-3887-44a2-91b0-b34c376d1ee6\") " Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.214950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-combined-ca-bundle\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.215009 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-logs\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.215034 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data-custom\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.215072 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6rb8\" (UniqueName: \"kubernetes.io/projected/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-kube-api-access-m6rb8\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.215134 4779 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.217758 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d380bf6-3887-44a2-91b0-b34c376d1ee6-logs" (OuterVolumeSpecName: "logs") pod "2d380bf6-3887-44a2-91b0-b34c376d1ee6" (UID: "2d380bf6-3887-44a2-91b0-b34c376d1ee6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.218082 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-logs\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.257766 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.258166 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d380bf6-3887-44a2-91b0-b34c376d1ee6-kube-api-access-5xxsv" (OuterVolumeSpecName: "kube-api-access-5xxsv") pod "2d380bf6-3887-44a2-91b0-b34c376d1ee6" (UID: "2d380bf6-3887-44a2-91b0-b34c376d1ee6"). InnerVolumeSpecName "kube-api-access-5xxsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.278618 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-combined-ca-bundle\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.282450 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data-custom\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.313488 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6rb8\" (UniqueName: \"kubernetes.io/projected/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-kube-api-access-m6rb8\") pod \"barbican-api-77d4c87646-5g4ds\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.322179 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d380bf6-3887-44a2-91b0-b34c376d1ee6-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.322217 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xxsv\" (UniqueName: \"kubernetes.io/projected/2d380bf6-3887-44a2-91b0-b34c376d1ee6-kube-api-access-5xxsv\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.339121 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "2d380bf6-3887-44a2-91b0-b34c376d1ee6" (UID: "2d380bf6-3887-44a2-91b0-b34c376d1ee6"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.341684 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d380bf6-3887-44a2-91b0-b34c376d1ee6" (UID: "2d380bf6-3887-44a2-91b0-b34c376d1ee6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.417353 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.427774 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.427826 4779 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.463952 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-config-data" (OuterVolumeSpecName: "config-data") pod "2d380bf6-3887-44a2-91b0-b34c376d1ee6" (UID: "2d380bf6-3887-44a2-91b0-b34c376d1ee6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.478207 4779 generic.go:334] "Generic (PLEG): container finished" podID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerID="945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce" exitCode=0 Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.478266 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"2d380bf6-3887-44a2-91b0-b34c376d1ee6","Type":"ContainerDied","Data":"945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce"} Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.478293 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"2d380bf6-3887-44a2-91b0-b34c376d1ee6","Type":"ContainerDied","Data":"446d8482815e04a48cbc2d4fa47166bca5bda6cce8f66f96d48e4ed3bda9b6bf"} Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.478310 4779 scope.go:117] "RemoveContainer" containerID="945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.478422 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.529924 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d380bf6-3887-44a2-91b0-b34c376d1ee6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.531637 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerStarted","Data":"faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a"} Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.531820 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="ceilometer-central-agent" containerID="cri-o://e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca" gracePeriod=30 Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.532060 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.532121 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="sg-core" containerID="cri-o://f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d" gracePeriod=30 Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.532255 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="proxy-httpd" containerID="cri-o://faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a" gracePeriod=30 Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.532329 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="ceilometer-notification-agent" containerID="cri-o://54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10" gracePeriod=30 Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.615809 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" event={"ID":"b699141c-f160-495b-9954-891be088873e","Type":"ContainerStarted","Data":"7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207"} Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.616514 4779 scope.go:117] "RemoveContainer" containerID="88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.656684 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.416121229 podStartE2EDuration="41.656664047s" podCreationTimestamp="2025-09-29 09:46:44 +0000 UTC" firstStartedPulling="2025-09-29 09:46:46.838275663 +0000 UTC m=+1038.819599567" lastFinishedPulling="2025-09-29 09:47:24.078818481 +0000 UTC m=+1076.060142385" observedRunningTime="2025-09-29 09:47:25.615667834 +0000 UTC m=+1077.596991728" watchObservedRunningTime="2025-09-29 09:47:25.656664047 +0000 UTC m=+1077.637987941" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.695161 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.712391 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/watcher-api-0"] Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.714809 4779 scope.go:117] "RemoveContainer" containerID="dc57f62b4e8aac95d8856e4547b1ab206d4fe4ef4ceeb48680216143e21ffd0b" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.729944 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:47:25 crc kubenswrapper[4779]: E0929 09:47:25.730533 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerName="watcher-api" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.730632 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerName="watcher-api" Sep 29 09:47:25 crc kubenswrapper[4779]: E0929 09:47:25.730709 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerName="watcher-api-log" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.730757 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerName="watcher-api-log" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.730992 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerName="watcher-api" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.731075 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" containerName="watcher-api-log" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.732129 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.738427 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.738741 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.738847 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.739070 4779 scope.go:117] "RemoveContainer" containerID="945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.744635 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc" Sep 29 09:47:25 crc kubenswrapper[4779]: E0929 09:47:25.744751 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce\": container with ID starting with 945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce not found: ID does not exist" containerID="945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.744784 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce"} err="failed to get container status \"945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce\": rpc error: code = NotFound desc = could not find container \"945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce\": container with ID starting with 
945728a2e7c29de6ae7aa1504cf5070635ea6c2e42be481b32f3a251dc0e7fce not found: ID does not exist" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.744817 4779 scope.go:117] "RemoveContainer" containerID="88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb" Sep 29 09:47:25 crc kubenswrapper[4779]: E0929 09:47:25.747442 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb\": container with ID starting with 88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb not found: ID does not exist" containerID="88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.747467 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb"} err="failed to get container status \"88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb\": rpc error: code = NotFound desc = could not find container \"88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb\": container with ID starting with 88060f87ed244aea5c9e79b7418ac7b6b77b220398530f74559c65915c5d7deb not found: ID does not exist" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.843084 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-public-tls-certs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.843336 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.843381 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qpjz\" (UniqueName: \"kubernetes.io/projected/83913750-39b3-4380-a4a1-b4f900d027d7-kube-api-access-8qpjz\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.844104 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.844259 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83913750-39b3-4380-a4a1-b4f900d027d7-logs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.844588 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: 
\"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.844758 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-config-data\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.869942 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6cc6cc97c7-9ffpm"] Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.946778 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83913750-39b3-4380-a4a1-b4f900d027d7-logs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.947501 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83913750-39b3-4380-a4a1-b4f900d027d7-logs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.947635 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.947760 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-config-data\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.947885 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-public-tls-certs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.947978 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.948062 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qpjz\" (UniqueName: \"kubernetes.io/projected/83913750-39b3-4380-a4a1-b4f900d027d7-kube-api-access-8qpjz\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.948175 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.951042 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.951377 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-public-tls-certs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.951682 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.953046 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.954936 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83913750-39b3-4380-a4a1-b4f900d027d7-config-data\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:25 crc kubenswrapper[4779]: I0929 09:47:25.979748 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qpjz\" (UniqueName: \"kubernetes.io/projected/83913750-39b3-4380-a4a1-b4f900d027d7-kube-api-access-8qpjz\") pod \"watcher-api-0\" (UID: \"83913750-39b3-4380-a4a1-b4f900d027d7\") " pod="openstack/watcher-api-0" Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.083769 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.102560 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5565dc4c94-dr8wr"] Sep 29 09:47:26 crc kubenswrapper[4779]: W0929 09:47:26.154498 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fdf2fc8_ee7f_4a6b_b9c8_b90e96f8f75c.slice/crio-fa3c493fccf9df5e310b422996b570f0ef62676052fdbac5c8576ecd2aa0ee7e WatchSource:0}: Error finding container fa3c493fccf9df5e310b422996b570f0ef62676052fdbac5c8576ecd2aa0ee7e: Status 404 returned error can't find the container with id fa3c493fccf9df5e310b422996b570f0ef62676052fdbac5c8576ecd2aa0ee7e Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.199979 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-77d4c87646-5g4ds"] Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.208292 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-646f8b549-z5mvs"] Sep 29 09:47:26 crc kubenswrapper[4779]: W0929 09:47:26.215446 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd8d2bbf_a1bd_4bc6_8212_91eceefdfb45.slice/crio-20d7b637393245d57df389a420c8e1305707d935c7441aed32e2b96b2ce32fb9 WatchSource:0}: Error finding container 20d7b637393245d57df389a420c8e1305707d935c7441aed32e2b96b2ce32fb9: Status 404 returned error can't find the container with id 20d7b637393245d57df389a420c8e1305707d935c7441aed32e2b96b2ce32fb9 Sep 29 09:47:26 crc kubenswrapper[4779]: W0929 09:47:26.216658 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30b112ba_d459_4229_a0d5_a327cdf7d39a.slice/crio-110aa42efb22aa4994c62725b4678e7d5dd58fce9a18d685506905c834046e67 WatchSource:0}: Error finding container 110aa42efb22aa4994c62725b4678e7d5dd58fce9a18d685506905c834046e67: Status 404 returned error can't find the container with id 110aa42efb22aa4994c62725b4678e7d5dd58fce9a18d685506905c834046e67 Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.602097 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bbf17a9c-86ea-4b10-a73f-81758085521e","Type":"ContainerStarted","Data":"2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.608065 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" event={"ID":"30b112ba-d459-4229-a0d5-a327cdf7d39a","Type":"ContainerStarted","Data":"110aa42efb22aa4994c62725b4678e7d5dd58fce9a18d685506905c834046e67"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.617064 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77d4c87646-5g4ds" event={"ID":"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45","Type":"ContainerStarted","Data":"20d7b637393245d57df389a420c8e1305707d935c7441aed32e2b96b2ce32fb9"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.625342 4779 generic.go:334] "Generic (PLEG): container finished" podID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerID="faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a" exitCode=0 Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.625368 4779 generic.go:334] "Generic (PLEG): container finished" podID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" 
containerID="f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d" exitCode=2 Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.625377 4779 generic.go:334] "Generic (PLEG): container finished" podID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerID="e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca" exitCode=0 Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.625422 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerDied","Data":"faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.625448 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerDied","Data":"f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.625458 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerDied","Data":"e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.631672 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerStarted","Data":"57d6ed8fa30263cb34a23159118310a096e08b89d83acaef3afa9ecc7a1a48e3"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.645756 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" event={"ID":"e81a45ce-9e33-41f4-9cf2-aa44749e66e3","Type":"ContainerStarted","Data":"a0b0e9db67c23a32e38e49d632d24c756a032e12d1897904d199221661da4dd6"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.658306 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bd1b88bc-6a49-42c8-8a07-9887030daf75","Type":"ContainerStarted","Data":"219a0bdf148545207fc39825edbb62537b18e3fbab5e6999e4931f8167f4f9e7"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.658767 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerName="cinder-api-log" containerID="cri-o://6d416c201fc6350bc36a311c5718e7aa7a01c858bafd3e6cacaf6b4ab23a663a" gracePeriod=30 Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.659089 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.659203 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerName="cinder-api" containerID="cri-o://219a0bdf148545207fc39825edbb62537b18e3fbab5e6999e4931f8167f4f9e7" gracePeriod=30 Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.662615 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" event={"ID":"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c","Type":"ContainerStarted","Data":"fa3c493fccf9df5e310b422996b570f0ef62676052fdbac5c8576ecd2aa0ee7e"} Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.662728 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" 
podUID="b699141c-f160-495b-9954-891be088873e" containerName="dnsmasq-dns" containerID="cri-o://7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207" gracePeriod=10 Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.681766 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=10.681747768 podStartE2EDuration="10.681747768s" podCreationTimestamp="2025-09-29 09:47:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:26.67697604 +0000 UTC m=+1078.658299944" watchObservedRunningTime="2025-09-29 09:47:26.681747768 +0000 UTC m=+1078.663071672" Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.748799 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d380bf6-3887-44a2-91b0-b34c376d1ee6" path="/var/lib/kubelet/pods/2d380bf6-3887-44a2-91b0-b34c376d1ee6/volumes" Sep 29 09:47:26 crc kubenswrapper[4779]: I0929 09:47:26.755486 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.511628 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.593134 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-nb\") pod \"b699141c-f160-495b-9954-891be088873e\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.593185 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz88t\" (UniqueName: \"kubernetes.io/projected/b699141c-f160-495b-9954-891be088873e-kube-api-access-qz88t\") pod \"b699141c-f160-495b-9954-891be088873e\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.593247 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-config\") pod \"b699141c-f160-495b-9954-891be088873e\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.593302 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-dns-svc\") pod \"b699141c-f160-495b-9954-891be088873e\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.594111 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-sb\") pod \"b699141c-f160-495b-9954-891be088873e\" (UID: \"b699141c-f160-495b-9954-891be088873e\") " Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.598865 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b699141c-f160-495b-9954-891be088873e-kube-api-access-qz88t" (OuterVolumeSpecName: "kube-api-access-qz88t") pod "b699141c-f160-495b-9954-891be088873e" (UID: "b699141c-f160-495b-9954-891be088873e"). InnerVolumeSpecName "kube-api-access-qz88t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.652441 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b699141c-f160-495b-9954-891be088873e" (UID: "b699141c-f160-495b-9954-891be088873e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.681957 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"83913750-39b3-4380-a4a1-b4f900d027d7","Type":"ContainerStarted","Data":"d0e80c98926a5358f38e595da14fb10dc942636a49776e6ba8597bb3eccb5356"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.682308 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"83913750-39b3-4380-a4a1-b4f900d027d7","Type":"ContainerStarted","Data":"9a3ac22cc3b12d9e8a6a1bcab5129d142fbe454b889bc365a85751719c22b975"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.682378 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"83913750-39b3-4380-a4a1-b4f900d027d7","Type":"ContainerStarted","Data":"4997d9465da8ad82bc830edee9593b343a00268bcdd5fb02f8f4dad31ce2e47d"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.682832 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.694400 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="83913750-39b3-4380-a4a1-b4f900d027d7" containerName="watcher-api" probeResult="failure" output="Get \"https://10.217.0.166:9322/\": dial tcp 10.217.0.166:9322: connect: connection refused" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.698760 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.698801 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz88t\" (UniqueName: \"kubernetes.io/projected/b699141c-f160-495b-9954-891be088873e-kube-api-access-qz88t\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.708202 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b699141c-f160-495b-9954-891be088873e" (UID: "b699141c-f160-495b-9954-891be088873e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.717888 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-config" (OuterVolumeSpecName: "config") pod "b699141c-f160-495b-9954-891be088873e" (UID: "b699141c-f160-495b-9954-891be088873e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.718176 4779 generic.go:334] "Generic (PLEG): container finished" podID="b699141c-f160-495b-9954-891be088873e" containerID="7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207" exitCode=0 Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.718273 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" event={"ID":"b699141c-f160-495b-9954-891be088873e","Type":"ContainerDied","Data":"7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.718310 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" event={"ID":"b699141c-f160-495b-9954-891be088873e","Type":"ContainerDied","Data":"022d8a67c1a13b586e8c1f7a7a97b1fa12d7bf0be5d58730104a6d8c48d43d7f"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.718330 4779 scope.go:117] "RemoveContainer" containerID="7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.718454 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c4564557-zhwlz" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.720648 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=2.720628967 podStartE2EDuration="2.720628967s" podCreationTimestamp="2025-09-29 09:47:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:27.718496935 +0000 UTC m=+1079.699820849" watchObservedRunningTime="2025-09-29 09:47:27.720628967 +0000 UTC m=+1079.701952871" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.741421 4779 generic.go:334] "Generic (PLEG): container finished" podID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerID="219a0bdf148545207fc39825edbb62537b18e3fbab5e6999e4931f8167f4f9e7" exitCode=0 Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.741464 4779 generic.go:334] "Generic (PLEG): container finished" podID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerID="6d416c201fc6350bc36a311c5718e7aa7a01c858bafd3e6cacaf6b4ab23a663a" exitCode=143 Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.741698 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bd1b88bc-6a49-42c8-8a07-9887030daf75","Type":"ContainerDied","Data":"219a0bdf148545207fc39825edbb62537b18e3fbab5e6999e4931f8167f4f9e7"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.741737 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bd1b88bc-6a49-42c8-8a07-9887030daf75","Type":"ContainerDied","Data":"6d416c201fc6350bc36a311c5718e7aa7a01c858bafd3e6cacaf6b4ab23a663a"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.758621 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b699141c-f160-495b-9954-891be088873e" (UID: "b699141c-f160-495b-9954-891be088873e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.759062 4779 generic.go:334] "Generic (PLEG): container finished" podID="30b112ba-d459-4229-a0d5-a327cdf7d39a" containerID="9e5b56d40142339b2b19586d2b73e6bc5234bec86797cccf6459ec28f327845b" exitCode=0 Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.759854 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" event={"ID":"30b112ba-d459-4229-a0d5-a327cdf7d39a","Type":"ContainerDied","Data":"9e5b56d40142339b2b19586d2b73e6bc5234bec86797cccf6459ec28f327845b"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.771287 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bbf17a9c-86ea-4b10-a73f-81758085521e","Type":"ContainerStarted","Data":"bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.774049 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77d4c87646-5g4ds" event={"ID":"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45","Type":"ContainerStarted","Data":"c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.774075 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77d4c87646-5g4ds" event={"ID":"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45","Type":"ContainerStarted","Data":"ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8"} Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.774498 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.774529 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.801522 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.801547 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.801558 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b699141c-f160-495b-9954-891be088873e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.806865 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.906966271 podStartE2EDuration="11.806846874s" podCreationTimestamp="2025-09-29 09:47:16 +0000 UTC" firstStartedPulling="2025-09-29 09:47:17.219109767 +0000 UTC m=+1069.200433671" lastFinishedPulling="2025-09-29 09:47:24.11899037 +0000 UTC m=+1076.100314274" observedRunningTime="2025-09-29 09:47:27.806186385 +0000 UTC m=+1079.787510279" watchObservedRunningTime="2025-09-29 09:47:27.806846874 +0000 UTC m=+1079.788170778" Sep 29 09:47:27 crc kubenswrapper[4779]: I0929 09:47:27.833406 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-77d4c87646-5g4ds" podStartSLOduration=3.833382099 podStartE2EDuration="3.833382099s" 
podCreationTimestamp="2025-09-29 09:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:27.825244455 +0000 UTC m=+1079.806568359" watchObservedRunningTime="2025-09-29 09:47:27.833382099 +0000 UTC m=+1079.814706003" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.061994 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c4564557-zhwlz"] Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.068463 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c4564557-zhwlz"] Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.693473 4779 scope.go:117] "RemoveContainer" containerID="307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.760211 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b699141c-f160-495b-9954-891be088873e" path="/var/lib/kubelet/pods/b699141c-f160-495b-9954-891be088873e/volumes" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.813342 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bd1b88bc-6a49-42c8-8a07-9887030daf75","Type":"ContainerDied","Data":"68776508c67b4c0464a13daee0cd3f7e79b859773e62b900460c638d1d8d2561"} Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.813397 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68776508c67b4c0464a13daee0cd3f7e79b859773e62b900460c638d1d8d2561" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.841727 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.864531 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7fd47db598-vbs8p"] Sep 29 09:47:28 crc kubenswrapper[4779]: E0929 09:47:28.866915 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerName="cinder-api-log" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.867073 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerName="cinder-api-log" Sep 29 09:47:28 crc kubenswrapper[4779]: E0929 09:47:28.867168 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerName="cinder-api" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.867248 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerName="cinder-api" Sep 29 09:47:28 crc kubenswrapper[4779]: E0929 09:47:28.867349 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b699141c-f160-495b-9954-891be088873e" containerName="init" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.867439 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b699141c-f160-495b-9954-891be088873e" containerName="init" Sep 29 09:47:28 crc kubenswrapper[4779]: E0929 09:47:28.867528 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b699141c-f160-495b-9954-891be088873e" containerName="dnsmasq-dns" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.867600 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b699141c-f160-495b-9954-891be088873e" containerName="dnsmasq-dns" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.867921 4779 
memory_manager.go:354] "RemoveStaleState removing state" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerName="cinder-api" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.873586 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b699141c-f160-495b-9954-891be088873e" containerName="dnsmasq-dns" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.873803 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" containerName="cinder-api-log" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.879004 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.886293 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7fd47db598-vbs8p"] Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.887136 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.887362 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.931442 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd1b88bc-6a49-42c8-8a07-9887030daf75-logs\") pod \"bd1b88bc-6a49-42c8-8a07-9887030daf75\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.931543 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data-custom\") pod \"bd1b88bc-6a49-42c8-8a07-9887030daf75\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.931568 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffjjf\" (UniqueName: \"kubernetes.io/projected/bd1b88bc-6a49-42c8-8a07-9887030daf75-kube-api-access-ffjjf\") pod \"bd1b88bc-6a49-42c8-8a07-9887030daf75\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.931602 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-combined-ca-bundle\") pod \"bd1b88bc-6a49-42c8-8a07-9887030daf75\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.931728 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-scripts\") pod \"bd1b88bc-6a49-42c8-8a07-9887030daf75\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.931746 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd1b88bc-6a49-42c8-8a07-9887030daf75-etc-machine-id\") pod \"bd1b88bc-6a49-42c8-8a07-9887030daf75\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.931846 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data\") pod \"bd1b88bc-6a49-42c8-8a07-9887030daf75\" (UID: \"bd1b88bc-6a49-42c8-8a07-9887030daf75\") " Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.933564 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-internal-tls-certs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.933639 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca54737d-5152-4b83-9513-602951da4cfe-logs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.933755 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-combined-ca-bundle\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.934064 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-public-tls-certs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.934179 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-config-data\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.934253 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmrmz\" (UniqueName: \"kubernetes.io/projected/ca54737d-5152-4b83-9513-602951da4cfe-kube-api-access-xmrmz\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.934391 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-config-data-custom\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.935769 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd1b88bc-6a49-42c8-8a07-9887030daf75-logs" (OuterVolumeSpecName: "logs") pod "bd1b88bc-6a49-42c8-8a07-9887030daf75" (UID: "bd1b88bc-6a49-42c8-8a07-9887030daf75"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.940075 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd1b88bc-6a49-42c8-8a07-9887030daf75-kube-api-access-ffjjf" (OuterVolumeSpecName: "kube-api-access-ffjjf") pod "bd1b88bc-6a49-42c8-8a07-9887030daf75" (UID: "bd1b88bc-6a49-42c8-8a07-9887030daf75"). InnerVolumeSpecName "kube-api-access-ffjjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.940159 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bd1b88bc-6a49-42c8-8a07-9887030daf75-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bd1b88bc-6a49-42c8-8a07-9887030daf75" (UID: "bd1b88bc-6a49-42c8-8a07-9887030daf75"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.954168 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bd1b88bc-6a49-42c8-8a07-9887030daf75" (UID: "bd1b88bc-6a49-42c8-8a07-9887030daf75"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.957393 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-scripts" (OuterVolumeSpecName: "scripts") pod "bd1b88bc-6a49-42c8-8a07-9887030daf75" (UID: "bd1b88bc-6a49-42c8-8a07-9887030daf75"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:28 crc kubenswrapper[4779]: I0929 09:47:28.988143 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd1b88bc-6a49-42c8-8a07-9887030daf75" (UID: "bd1b88bc-6a49-42c8-8a07-9887030daf75"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.010265 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data" (OuterVolumeSpecName: "config-data") pod "bd1b88bc-6a49-42c8-8a07-9887030daf75" (UID: "bd1b88bc-6a49-42c8-8a07-9887030daf75"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.035964 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-combined-ca-bundle\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036052 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-public-tls-certs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036098 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-config-data\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036134 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmrmz\" (UniqueName: \"kubernetes.io/projected/ca54737d-5152-4b83-9513-602951da4cfe-kube-api-access-xmrmz\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036185 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-config-data-custom\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036225 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-internal-tls-certs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036253 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca54737d-5152-4b83-9513-602951da4cfe-logs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036338 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd1b88bc-6a49-42c8-8a07-9887030daf75-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036350 4779 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036362 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffjjf\" (UniqueName: 
\"kubernetes.io/projected/bd1b88bc-6a49-42c8-8a07-9887030daf75-kube-api-access-ffjjf\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036372 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036380 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036388 4779 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd1b88bc-6a49-42c8-8a07-9887030daf75-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036396 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd1b88bc-6a49-42c8-8a07-9887030daf75-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.036763 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca54737d-5152-4b83-9513-602951da4cfe-logs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.040012 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-combined-ca-bundle\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.041797 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-public-tls-certs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.043523 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-config-data-custom\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.044988 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-config-data\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.051685 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca54737d-5152-4b83-9513-602951da4cfe-internal-tls-certs\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.062676 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xmrmz\" (UniqueName: \"kubernetes.io/projected/ca54737d-5152-4b83-9513-602951da4cfe-kube-api-access-xmrmz\") pod \"barbican-api-7fd47db598-vbs8p\" (UID: \"ca54737d-5152-4b83-9513-602951da4cfe\") " pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.218287 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.587528 4779 scope.go:117] "RemoveContainer" containerID="7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207" Sep 29 09:47:29 crc kubenswrapper[4779]: E0929 09:47:29.588454 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207\": container with ID starting with 7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207 not found: ID does not exist" containerID="7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.588552 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207"} err="failed to get container status \"7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207\": rpc error: code = NotFound desc = could not find container \"7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207\": container with ID starting with 7818b8027915b4e649d0b367cc73a4816284973b7956022148d3afd66fd08207 not found: ID does not exist" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.588636 4779 scope.go:117] "RemoveContainer" containerID="307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f" Sep 29 09:47:29 crc kubenswrapper[4779]: E0929 09:47:29.588978 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f\": container with ID starting with 307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f not found: ID does not exist" containerID="307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.589080 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f"} err="failed to get container status \"307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f\": rpc error: code = NotFound desc = could not find container \"307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f\": container with ID starting with 307ed76f7b7950af27ab794e02a8cafff89b1c316d7ef97b0f902732010d625f not found: ID does not exist" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.866962 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.923658 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.958895 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.964583 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.980567 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.985188 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.989071 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 29 09:47:29 crc kubenswrapper[4779]: I0929 09:47:29.990035 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.006959 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064057 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-scripts\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064110 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-config-data-custom\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064151 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064182 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-public-tls-certs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064325 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/901ac146-e211-4724-a8f5-2c4f9c966bba-etc-machine-id\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064386 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " 
pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064404 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/901ac146-e211-4724-a8f5-2c4f9c966bba-logs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064422 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-config-data\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.064471 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5flf6\" (UniqueName: \"kubernetes.io/projected/901ac146-e211-4724-a8f5-2c4f9c966bba-kube-api-access-5flf6\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.170135 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/901ac146-e211-4724-a8f5-2c4f9c966bba-etc-machine-id\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.170228 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/901ac146-e211-4724-a8f5-2c4f9c966bba-etc-machine-id\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.170389 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.171432 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/901ac146-e211-4724-a8f5-2c4f9c966bba-logs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.171467 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-config-data\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.171566 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5flf6\" (UniqueName: \"kubernetes.io/projected/901ac146-e211-4724-a8f5-2c4f9c966bba-kube-api-access-5flf6\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.171610 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-scripts\") pod \"cinder-api-0\" (UID: 
\"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.171660 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-config-data-custom\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.171689 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.171756 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-public-tls-certs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.172019 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/901ac146-e211-4724-a8f5-2c4f9c966bba-logs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.176698 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-scripts\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.177720 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-config-data\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.180182 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.183442 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-public-tls-certs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.183613 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.192097 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7fd47db598-vbs8p"] Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.194703 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/901ac146-e211-4724-a8f5-2c4f9c966bba-config-data-custom\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.208554 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5flf6\" (UniqueName: \"kubernetes.io/projected/901ac146-e211-4724-a8f5-2c4f9c966bba-kube-api-access-5flf6\") pod \"cinder-api-0\" (UID: \"901ac146-e211-4724-a8f5-2c4f9c966bba\") " pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.308694 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.726472 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd1b88bc-6a49-42c8-8a07-9887030daf75" path="/var/lib/kubelet/pods/bd1b88bc-6a49-42c8-8a07-9887030daf75/volumes" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.826092 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.831024 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.884539 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-combined-ca-bundle\") pod \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.884623 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-log-httpd\") pod \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.884689 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-run-httpd\") pod \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.884767 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-sg-core-conf-yaml\") pod \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.884827 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4l9v\" (UniqueName: \"kubernetes.io/projected/785e1b61-3f05-4ac1-ade7-57c25fe6f177-kube-api-access-f4l9v\") pod \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.884882 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-config-data\") pod \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.884984 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-scripts\") pod \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\" (UID: \"785e1b61-3f05-4ac1-ade7-57c25fe6f177\") " Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.886392 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" event={"ID":"e81a45ce-9e33-41f4-9cf2-aa44749e66e3","Type":"ContainerStarted","Data":"d4431d24f306502c54c6eec470e7b2e7855c50b60088a3ab397d5b2cee86313a"} Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.886440 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" event={"ID":"e81a45ce-9e33-41f4-9cf2-aa44749e66e3","Type":"ContainerStarted","Data":"f3b3ae53c73328d4b41c4c6be16f3a54fda87e654da3824f2ad43175000a1106"} Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.887819 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "785e1b61-3f05-4ac1-ade7-57c25fe6f177" (UID: "785e1b61-3f05-4ac1-ade7-57c25fe6f177"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.894338 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "785e1b61-3f05-4ac1-ade7-57c25fe6f177" (UID: "785e1b61-3f05-4ac1-ade7-57c25fe6f177"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.905016 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-scripts" (OuterVolumeSpecName: "scripts") pod "785e1b61-3f05-4ac1-ade7-57c25fe6f177" (UID: "785e1b61-3f05-4ac1-ade7-57c25fe6f177"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.905167 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/785e1b61-3f05-4ac1-ade7-57c25fe6f177-kube-api-access-f4l9v" (OuterVolumeSpecName: "kube-api-access-f4l9v") pod "785e1b61-3f05-4ac1-ade7-57c25fe6f177" (UID: "785e1b61-3f05-4ac1-ade7-57c25fe6f177"). InnerVolumeSpecName "kube-api-access-f4l9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.907105 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" event={"ID":"30b112ba-d459-4229-a0d5-a327cdf7d39a","Type":"ContainerStarted","Data":"797dbaa177f801ec5b0bc4880bb48c6bc51054477e09eed659029962b71a0084"} Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.908104 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.935156 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "785e1b61-3f05-4ac1-ade7-57c25fe6f177" (UID: "785e1b61-3f05-4ac1-ade7-57c25fe6f177"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.935760 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6cc6cc97c7-9ffpm" podStartSLOduration=3.204587221 podStartE2EDuration="6.935733714s" podCreationTimestamp="2025-09-29 09:47:24 +0000 UTC" firstStartedPulling="2025-09-29 09:47:25.892665395 +0000 UTC m=+1077.873989299" lastFinishedPulling="2025-09-29 09:47:29.623811888 +0000 UTC m=+1081.605135792" observedRunningTime="2025-09-29 09:47:30.930807752 +0000 UTC m=+1082.912131656" watchObservedRunningTime="2025-09-29 09:47:30.935733714 +0000 UTC m=+1082.917057618" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.941248 4779 generic.go:334] "Generic (PLEG): container finished" podID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerID="54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10" exitCode=0 Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.941411 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerDied","Data":"54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10"} Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.941493 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"785e1b61-3f05-4ac1-ade7-57c25fe6f177","Type":"ContainerDied","Data":"8bfe561dcb1fb50cbfca99cc51e4d2ac5adf5bf75409d47b9f9ebaa22c1ff4a0"} Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.941553 4779 scope.go:117] "RemoveContainer" containerID="faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.941734 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.970526 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7fd47db598-vbs8p" event={"ID":"ca54737d-5152-4b83-9513-602951da4cfe","Type":"ContainerStarted","Data":"761ee72bc264b982d2d875fb8f093c5d1e4a24bdd42c6005dbaf3b3c54f153bb"} Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.970584 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7fd47db598-vbs8p" event={"ID":"ca54737d-5152-4b83-9513-602951da4cfe","Type":"ContainerStarted","Data":"b8889251d94ea4126392d2994ddf8e59f98553bada2cc64d81a981d27f5110f4"} Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.970598 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7fd47db598-vbs8p" event={"ID":"ca54737d-5152-4b83-9513-602951da4cfe","Type":"ContainerStarted","Data":"d4f218f0b82897303cc3ac2e85e05d4fec89da2487d534f9bd5d37a572b1c59b"} Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.970894 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.971614 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.983044 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" podStartSLOduration=6.983022988 podStartE2EDuration="6.983022988s" podCreationTimestamp="2025-09-29 09:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:30.98067513 +0000 UTC m=+1082.961999034" watchObservedRunningTime="2025-09-29 09:47:30.983022988 +0000 UTC m=+1082.964346892" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.989563 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.989598 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/785e1b61-3f05-4ac1-ade7-57c25fe6f177-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.989610 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.989625 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4l9v\" (UniqueName: \"kubernetes.io/projected/785e1b61-3f05-4ac1-ade7-57c25fe6f177-kube-api-access-f4l9v\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:30 crc kubenswrapper[4779]: I0929 09:47:30.989636 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:30.998849 4779 scope.go:117] "RemoveContainer" containerID="f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.005095 4779 generic.go:334] "Generic (PLEG): container finished" 
podID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerID="57d6ed8fa30263cb34a23159118310a096e08b89d83acaef3afa9ecc7a1a48e3" exitCode=1 Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.005166 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerDied","Data":"57d6ed8fa30263cb34a23159118310a096e08b89d83acaef3afa9ecc7a1a48e3"} Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.005710 4779 scope.go:117] "RemoveContainer" containerID="57d6ed8fa30263cb34a23159118310a096e08b89d83acaef3afa9ecc7a1a48e3" Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.005917 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.019202 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"901ac146-e211-4724-a8f5-2c4f9c966bba","Type":"ContainerStarted","Data":"5df5415b99451e443895a28293290115aa96009f6833b5537fe1b85d19b346c6"} Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.024150 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7fd47db598-vbs8p" podStartSLOduration=3.024112053 podStartE2EDuration="3.024112053s" podCreationTimestamp="2025-09-29 09:47:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:31.023374472 +0000 UTC m=+1083.004698376" watchObservedRunningTime="2025-09-29 09:47:31.024112053 +0000 UTC m=+1083.005435957" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.046478 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" event={"ID":"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c","Type":"ContainerStarted","Data":"834559b809a985a553e7a3f6b75600b36b51e8f2fec32831c1ccf6ac0d6c30b6"} Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.046520 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" event={"ID":"8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c","Type":"ContainerStarted","Data":"88ea5a51557aa9e989b43a7c0d47205290065657269ec6cfabcad5c8322e8af1"} Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.086618 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.131429 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5565dc4c94-dr8wr" podStartSLOduration=3.633265677 podStartE2EDuration="7.131409768s" podCreationTimestamp="2025-09-29 09:47:24 +0000 UTC" firstStartedPulling="2025-09-29 09:47:26.168634146 +0000 UTC m=+1078.149958040" lastFinishedPulling="2025-09-29 09:47:29.666778227 +0000 UTC m=+1081.648102131" observedRunningTime="2025-09-29 09:47:31.128675339 +0000 UTC m=+1083.109999243" watchObservedRunningTime="2025-09-29 09:47:31.131409768 +0000 UTC m=+1083.112733692" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.146188 4779 scope.go:117] "RemoveContainer" 
containerID="54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.177855 4779 scope.go:117] "RemoveContainer" containerID="e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.178066 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-config-data" (OuterVolumeSpecName: "config-data") pod "785e1b61-3f05-4ac1-ade7-57c25fe6f177" (UID: "785e1b61-3f05-4ac1-ade7-57c25fe6f177"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.181238 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "785e1b61-3f05-4ac1-ade7-57c25fe6f177" (UID: "785e1b61-3f05-4ac1-ade7-57c25fe6f177"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.194445 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.194487 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785e1b61-3f05-4ac1-ade7-57c25fe6f177-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.204097 4779 scope.go:117] "RemoveContainer" containerID="faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a" Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.205177 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a\": container with ID starting with faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a not found: ID does not exist" containerID="faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.205219 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a"} err="failed to get container status \"faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a\": rpc error: code = NotFound desc = could not find container \"faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a\": container with ID starting with faee0e298717649e7e4d7b0193c03af7eeb0834725b7e8cc713d239054d3e16a not found: ID does not exist" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.205243 4779 scope.go:117] "RemoveContainer" containerID="f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d" Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.206424 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d\": container with ID starting with f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d not found: ID does not exist" containerID="f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d" Sep 29 
09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.206449 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d"} err="failed to get container status \"f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d\": rpc error: code = NotFound desc = could not find container \"f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d\": container with ID starting with f17b5eb38581eabcc30f952ba49ed29c2beb5ce49347b128736c1713c162d65d not found: ID does not exist" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.206465 4779 scope.go:117] "RemoveContainer" containerID="54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10" Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.209023 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10\": container with ID starting with 54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10 not found: ID does not exist" containerID="54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.209059 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10"} err="failed to get container status \"54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10\": rpc error: code = NotFound desc = could not find container \"54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10\": container with ID starting with 54bdd4db49a7cf15f5f139bc7a5d6f441c6efde4ed6eb5046b3a9efd35c90b10 not found: ID does not exist" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.209082 4779 scope.go:117] "RemoveContainer" containerID="e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca" Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.209731 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca\": container with ID starting with e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca not found: ID does not exist" containerID="e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.209754 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca"} err="failed to get container status \"e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca\": rpc error: code = NotFound desc = could not find container \"e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca\": container with ID starting with e476822b62ee12988f0721149f099ee1097a135a452a6fde2057bf3720bfd0ca not found: ID does not exist" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.209769 4779 scope.go:117] "RemoveContainer" containerID="dc57f62b4e8aac95d8856e4547b1ab206d4fe4ef4ceeb48680216143e21ffd0b" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.330298 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.341671 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:47:31 crc 
kubenswrapper[4779]: I0929 09:47:31.356503 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.356826 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="ceilometer-notification-agent" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.356838 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="ceilometer-notification-agent" Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.356866 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="ceilometer-central-agent" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.356872 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="ceilometer-central-agent" Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.356882 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="sg-core" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.356888 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="sg-core" Sep 29 09:47:31 crc kubenswrapper[4779]: E0929 09:47:31.356914 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="proxy-httpd" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.356920 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="proxy-httpd" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.357068 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="ceilometer-notification-agent" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.357082 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="ceilometer-central-agent" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.357094 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="sg-core" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.357106 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" containerName="proxy-httpd" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.360677 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.363012 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.363278 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.372409 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.398840 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-log-httpd\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.398926 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbhgt\" (UniqueName: \"kubernetes.io/projected/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-kube-api-access-kbhgt\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.398968 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-scripts\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.399013 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-config-data\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.399036 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-run-httpd\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.399067 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.399106 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.500156 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 
09:47:31.500256 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-log-httpd\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.500280 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbhgt\" (UniqueName: \"kubernetes.io/projected/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-kube-api-access-kbhgt\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.500314 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-scripts\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.500353 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-config-data\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.500375 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-run-httpd\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.500432 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.502831 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-run-httpd\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.504376 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-log-httpd\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.509650 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-config-data\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.510505 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-scripts\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.512737 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.515298 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.530481 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbhgt\" (UniqueName: \"kubernetes.io/projected/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-kube-api-access-kbhgt\") pod \"ceilometer-0\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.677735 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.715385 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 09:47:31 crc kubenswrapper[4779]: I0929 09:47:31.943853 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 09:47:32 crc kubenswrapper[4779]: I0929 09:47:32.059644 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"901ac146-e211-4724-a8f5-2c4f9c966bba","Type":"ContainerStarted","Data":"a6c650b0c53164dd193064327bc01184b3fe37e54ee7c7e32bbdb602a475e45a"} Sep 29 09:47:32 crc kubenswrapper[4779]: I0929 09:47:32.106667 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:32 crc kubenswrapper[4779]: I0929 09:47:32.124871 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Sep 29 09:47:32 crc kubenswrapper[4779]: I0929 09:47:32.195038 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:47:32 crc kubenswrapper[4779]: I0929 09:47:32.730574 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="785e1b61-3f05-4ac1-ade7-57c25fe6f177" path="/var/lib/kubelet/pods/785e1b61-3f05-4ac1-ade7-57c25fe6f177/volumes" Sep 29 09:47:33 crc kubenswrapper[4779]: I0929 09:47:33.072309 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"901ac146-e211-4724-a8f5-2c4f9c966bba","Type":"ContainerStarted","Data":"fd9556825eba923e78905276320a28396012c28d4429cbf7cfccea838f436d80"} Sep 29 09:47:33 crc kubenswrapper[4779]: I0929 09:47:33.072437 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 09:47:33 crc kubenswrapper[4779]: I0929 09:47:33.074187 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerStarted","Data":"8ee03d0fb3f77380679450859183037f4b24ad48cbf1272456089a76f2a92df9"} Sep 29 09:47:33 crc kubenswrapper[4779]: I0929 09:47:33.074413 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerStarted","Data":"e132c7884661a71449a8ca42797b2fcc7c921ab27e35e1294492aabf7d6af489"} Sep 29 09:47:33 crc 
kubenswrapper[4779]: I0929 09:47:33.074424 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerStarted","Data":"765d444a85f29eca98436cf013dbb2cc977503d2e6173b64bec7cd5c634036cb"} Sep 29 09:47:33 crc kubenswrapper[4779]: I0929 09:47:33.074542 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerName="cinder-scheduler" containerID="cri-o://2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e" gracePeriod=30 Sep 29 09:47:33 crc kubenswrapper[4779]: I0929 09:47:33.074723 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerName="probe" containerID="cri-o://bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f" gracePeriod=30 Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.083970 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerStarted","Data":"4fe0a80938eae031d5916629d48b0f1da3935242de12693ce390ac59b8bb30e0"} Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.086434 4779 generic.go:334] "Generic (PLEG): container finished" podID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerID="bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f" exitCode=0 Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.086530 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bbf17a9c-86ea-4b10-a73f-81758085521e","Type":"ContainerDied","Data":"bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f"} Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.292215 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.292265 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.293148 4779 scope.go:117] "RemoveContainer" containerID="57d6ed8fa30263cb34a23159118310a096e08b89d83acaef3afa9ecc7a1a48e3" Sep 29 09:47:34 crc kubenswrapper[4779]: E0929 09:47:34.293407 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.316875 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.31685806 podStartE2EDuration="5.31685806s" podCreationTimestamp="2025-09-29 09:47:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:33.10497497 +0000 UTC m=+1085.086298874" watchObservedRunningTime="2025-09-29 09:47:34.31685806 +0000 UTC m=+1086.298181964" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.619382 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.651597 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data-custom\") pod \"bbf17a9c-86ea-4b10-a73f-81758085521e\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.651829 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data\") pod \"bbf17a9c-86ea-4b10-a73f-81758085521e\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.651862 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-combined-ca-bundle\") pod \"bbf17a9c-86ea-4b10-a73f-81758085521e\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.651957 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-scripts\") pod \"bbf17a9c-86ea-4b10-a73f-81758085521e\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.652081 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjvm8\" (UniqueName: \"kubernetes.io/projected/bbf17a9c-86ea-4b10-a73f-81758085521e-kube-api-access-cjvm8\") pod \"bbf17a9c-86ea-4b10-a73f-81758085521e\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.652757 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bbf17a9c-86ea-4b10-a73f-81758085521e-etc-machine-id\") pod \"bbf17a9c-86ea-4b10-a73f-81758085521e\" (UID: \"bbf17a9c-86ea-4b10-a73f-81758085521e\") " Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.653503 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bbf17a9c-86ea-4b10-a73f-81758085521e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bbf17a9c-86ea-4b10-a73f-81758085521e" (UID: "bbf17a9c-86ea-4b10-a73f-81758085521e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.661260 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bbf17a9c-86ea-4b10-a73f-81758085521e" (UID: "bbf17a9c-86ea-4b10-a73f-81758085521e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.708592 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbf17a9c-86ea-4b10-a73f-81758085521e-kube-api-access-cjvm8" (OuterVolumeSpecName: "kube-api-access-cjvm8") pod "bbf17a9c-86ea-4b10-a73f-81758085521e" (UID: "bbf17a9c-86ea-4b10-a73f-81758085521e"). InnerVolumeSpecName "kube-api-access-cjvm8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.715347 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-scripts" (OuterVolumeSpecName: "scripts") pod "bbf17a9c-86ea-4b10-a73f-81758085521e" (UID: "bbf17a9c-86ea-4b10-a73f-81758085521e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.755282 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.755310 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjvm8\" (UniqueName: \"kubernetes.io/projected/bbf17a9c-86ea-4b10-a73f-81758085521e-kube-api-access-cjvm8\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.755324 4779 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bbf17a9c-86ea-4b10-a73f-81758085521e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.755336 4779 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.893166 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbf17a9c-86ea-4b10-a73f-81758085521e" (UID: "bbf17a9c-86ea-4b10-a73f-81758085521e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.943167 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data" (OuterVolumeSpecName: "config-data") pod "bbf17a9c-86ea-4b10-a73f-81758085521e" (UID: "bbf17a9c-86ea-4b10-a73f-81758085521e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.958980 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:34 crc kubenswrapper[4779]: I0929 09:47:34.959008 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf17a9c-86ea-4b10-a73f-81758085521e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.096465 4779 generic.go:334] "Generic (PLEG): container finished" podID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerID="2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e" exitCode=0 Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.096527 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bbf17a9c-86ea-4b10-a73f-81758085521e","Type":"ContainerDied","Data":"2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e"} Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.096533 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.096560 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bbf17a9c-86ea-4b10-a73f-81758085521e","Type":"ContainerDied","Data":"e1f467a122e19cf112529cb725323f776e41700237dae8c5c7f7f38543dbfb1b"} Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.096579 4779 scope.go:117] "RemoveContainer" containerID="bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.097897 4779 generic.go:334] "Generic (PLEG): container finished" podID="ced430a8-7031-48a6-a86e-e827ef13b166" containerID="159a355342623059b2a7d69cd30beedc566ffc4fa9f5341bd02c36a3289d3b85" exitCode=0 Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.097928 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-r5dk2" event={"ID":"ced430a8-7031-48a6-a86e-e827ef13b166","Type":"ContainerDied","Data":"159a355342623059b2a7d69cd30beedc566ffc4fa9f5341bd02c36a3289d3b85"} Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.101354 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerStarted","Data":"9ecb8ba733adbb9319cc45d5b0664bfa8e1d1a9c530ef2da2a257366e545445e"} Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.102331 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.130450 4779 scope.go:117] "RemoveContainer" containerID="2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.156745 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.652783406 podStartE2EDuration="4.156724408s" podCreationTimestamp="2025-09-29 09:47:31 +0000 UTC" firstStartedPulling="2025-09-29 09:47:32.213090242 +0000 UTC m=+1084.194414146" lastFinishedPulling="2025-09-29 09:47:34.717031244 +0000 UTC m=+1086.698355148" observedRunningTime="2025-09-29 09:47:35.149641564 +0000 UTC m=+1087.130965478" 
watchObservedRunningTime="2025-09-29 09:47:35.156724408 +0000 UTC m=+1087.138048302" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.167754 4779 scope.go:117] "RemoveContainer" containerID="bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f" Sep 29 09:47:35 crc kubenswrapper[4779]: E0929 09:47:35.172046 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f\": container with ID starting with bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f not found: ID does not exist" containerID="bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.172085 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f"} err="failed to get container status \"bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f\": rpc error: code = NotFound desc = could not find container \"bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f\": container with ID starting with bd305a39d446730609ac23985540c5b8f3567b7b346f04f408b464a35558ce7f not found: ID does not exist" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.172111 4779 scope.go:117] "RemoveContainer" containerID="2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.177004 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:35 crc kubenswrapper[4779]: E0929 09:47:35.177562 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e\": container with ID starting with 2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e not found: ID does not exist" containerID="2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.177604 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e"} err="failed to get container status \"2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e\": rpc error: code = NotFound desc = could not find container \"2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e\": container with ID starting with 2074df38d4a6558b4d8474afd13099749bbe2af1d008cd9731113868080c2d2e not found: ID does not exist" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.194975 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.199640 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:35 crc kubenswrapper[4779]: E0929 09:47:35.200001 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerName="probe" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.200017 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerName="probe" Sep 29 09:47:35 crc kubenswrapper[4779]: E0929 09:47:35.200029 4779 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerName="cinder-scheduler" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.200036 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerName="cinder-scheduler" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.200246 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerName="cinder-scheduler" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.200261 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" containerName="probe" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.237151 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.237664 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.244707 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.258516 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.265417 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv6bg\" (UniqueName: \"kubernetes.io/projected/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-kube-api-access-nv6bg\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.265491 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.265541 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-config-data\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.265572 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-scripts\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.265618 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.265719 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.367074 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffc8d9cff-x6d6w"] Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.368038 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" podUID="5406545d-6af2-453b-bad1-c5d0d58521ff" containerName="dnsmasq-dns" containerID="cri-o://4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d" gracePeriod=10 Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.368340 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv6bg\" (UniqueName: \"kubernetes.io/projected/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-kube-api-access-nv6bg\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.368416 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.368448 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-config-data\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.368477 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-scripts\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.368509 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.368587 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.368591 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.377505 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-scripts\") pod \"cinder-scheduler-0\" (UID: 
\"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.385591 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.394489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv6bg\" (UniqueName: \"kubernetes.io/projected/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-kube-api-access-nv6bg\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.413543 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-config-data\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.415594 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d65424b8-83a8-44d3-8a99-c2dfdd5de5c9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9\") " pod="openstack/cinder-scheduler-0" Sep 29 09:47:35 crc kubenswrapper[4779]: E0929 09:47:35.545975 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5406545d_6af2_453b_bad1_c5d0d58521ff.slice/crio-conmon-4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5406545d_6af2_453b_bad1_c5d0d58521ff.slice/crio-4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d.scope\": RecentStats: unable to find data in memory cache]" Sep 29 09:47:35 crc kubenswrapper[4779]: I0929 09:47:35.655065 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.003102 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.086145 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.095042 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-config\") pod \"5406545d-6af2-453b-bad1-c5d0d58521ff\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.095126 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-nb\") pod \"5406545d-6af2-453b-bad1-c5d0d58521ff\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.095163 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg7dc\" (UniqueName: \"kubernetes.io/projected/5406545d-6af2-453b-bad1-c5d0d58521ff-kube-api-access-fg7dc\") pod \"5406545d-6af2-453b-bad1-c5d0d58521ff\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.095215 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-sb\") pod \"5406545d-6af2-453b-bad1-c5d0d58521ff\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.095311 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-dns-svc\") pod \"5406545d-6af2-453b-bad1-c5d0d58521ff\" (UID: \"5406545d-6af2-453b-bad1-c5d0d58521ff\") " Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.101865 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5406545d-6af2-453b-bad1-c5d0d58521ff-kube-api-access-fg7dc" (OuterVolumeSpecName: "kube-api-access-fg7dc") pod "5406545d-6af2-453b-bad1-c5d0d58521ff" (UID: "5406545d-6af2-453b-bad1-c5d0d58521ff"). InnerVolumeSpecName "kube-api-access-fg7dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.106033 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.125634 4779 generic.go:334] "Generic (PLEG): container finished" podID="5406545d-6af2-453b-bad1-c5d0d58521ff" containerID="4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d" exitCode=0 Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.126741 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.127358 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" event={"ID":"5406545d-6af2-453b-bad1-c5d0d58521ff","Type":"ContainerDied","Data":"4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d"} Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.133637 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffc8d9cff-x6d6w" event={"ID":"5406545d-6af2-453b-bad1-c5d0d58521ff","Type":"ContainerDied","Data":"121bfccf3e14645e45053c827def35bfe85058e4f3a8d9e48ec04d321bcef36f"} Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.133726 4779 scope.go:117] "RemoveContainer" containerID="4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.158472 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.172404 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5406545d-6af2-453b-bad1-c5d0d58521ff" (UID: "5406545d-6af2-453b-bad1-c5d0d58521ff"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.185536 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5406545d-6af2-453b-bad1-c5d0d58521ff" (UID: "5406545d-6af2-453b-bad1-c5d0d58521ff"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.200112 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.201445 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.201554 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg7dc\" (UniqueName: \"kubernetes.io/projected/5406545d-6af2-453b-bad1-c5d0d58521ff-kube-api-access-fg7dc\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.230435 4779 scope.go:117] "RemoveContainer" containerID="87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.269660 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5406545d-6af2-453b-bad1-c5d0d58521ff" (UID: "5406545d-6af2-453b-bad1-c5d0d58521ff"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.303100 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.305768 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-config" (OuterVolumeSpecName: "config") pod "5406545d-6af2-453b-bad1-c5d0d58521ff" (UID: "5406545d-6af2-453b-bad1-c5d0d58521ff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.333628 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.388006 4779 scope.go:117] "RemoveContainer" containerID="4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d" Sep 29 09:47:36 crc kubenswrapper[4779]: E0929 09:47:36.388617 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d\": container with ID starting with 4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d not found: ID does not exist" containerID="4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.388665 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d"} err="failed to get container status \"4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d\": rpc error: code = NotFound desc = could not find container \"4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d\": container with ID starting with 4507e239262228d4799c22c9fb73b0dba34b73e02661f912fac2588ed376c19d not found: ID does not exist" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.388685 4779 scope.go:117] "RemoveContainer" containerID="87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085" Sep 29 09:47:36 crc kubenswrapper[4779]: E0929 09:47:36.390743 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085\": container with ID starting with 87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085 not found: ID does not exist" containerID="87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.390774 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085"} err="failed to get container status \"87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085\": rpc error: code = NotFound desc = could not find container \"87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085\": container with ID starting with 87456fb09ed4d074f0627af9fe9c61abe763bd1ca3f19a47437645b44ebf1085 not found: ID does not exist" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.409096 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5406545d-6af2-453b-bad1-c5d0d58521ff-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.541313 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffc8d9cff-x6d6w"] Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.558909 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ffc8d9cff-x6d6w"] Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.701858 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-r5dk2" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.725450 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5406545d-6af2-453b-bad1-c5d0d58521ff" path="/var/lib/kubelet/pods/5406545d-6af2-453b-bad1-c5d0d58521ff/volumes" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.726062 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbf17a9c-86ea-4b10-a73f-81758085521e" path="/var/lib/kubelet/pods/bbf17a9c-86ea-4b10-a73f-81758085521e/volumes" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.816932 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-combined-ca-bundle\") pod \"ced430a8-7031-48a6-a86e-e827ef13b166\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.816991 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-config\") pod \"ced430a8-7031-48a6-a86e-e827ef13b166\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.821378 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zq7d5\" (UniqueName: \"kubernetes.io/projected/ced430a8-7031-48a6-a86e-e827ef13b166-kube-api-access-zq7d5\") pod \"ced430a8-7031-48a6-a86e-e827ef13b166\" (UID: \"ced430a8-7031-48a6-a86e-e827ef13b166\") " Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.826056 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ced430a8-7031-48a6-a86e-e827ef13b166-kube-api-access-zq7d5" (OuterVolumeSpecName: "kube-api-access-zq7d5") pod "ced430a8-7031-48a6-a86e-e827ef13b166" (UID: "ced430a8-7031-48a6-a86e-e827ef13b166"). InnerVolumeSpecName "kube-api-access-zq7d5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.841301 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ced430a8-7031-48a6-a86e-e827ef13b166" (UID: "ced430a8-7031-48a6-a86e-e827ef13b166"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.863125 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-config" (OuterVolumeSpecName: "config") pod "ced430a8-7031-48a6-a86e-e827ef13b166" (UID: "ced430a8-7031-48a6-a86e-e827ef13b166"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.924297 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zq7d5\" (UniqueName: \"kubernetes.io/projected/ced430a8-7031-48a6-a86e-e827ef13b166-kube-api-access-zq7d5\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.924331 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:36 crc kubenswrapper[4779]: I0929 09:47:36.924342 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ced430a8-7031-48a6-a86e-e827ef13b166-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.157737 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-r5dk2" event={"ID":"ced430a8-7031-48a6-a86e-e827ef13b166","Type":"ContainerDied","Data":"7a2f941ac4de747a8d27d75ab61fc9e0994b835f91a0cf08197945c1ab0b5383"} Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.157799 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a2f941ac4de747a8d27d75ab61fc9e0994b835f91a0cf08197945c1ab0b5383" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.157873 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-r5dk2" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.161551 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9","Type":"ContainerStarted","Data":"2ebbc648a0c08b90b59eb6b5713191f37f6e2f4fd616dfef7d7232eee73170cc"} Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.360745 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77d9688d49-649qz"] Sep 29 09:47:37 crc kubenswrapper[4779]: E0929 09:47:37.361337 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5406545d-6af2-453b-bad1-c5d0d58521ff" containerName="init" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.361351 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5406545d-6af2-453b-bad1-c5d0d58521ff" containerName="init" Sep 29 09:47:37 crc kubenswrapper[4779]: E0929 09:47:37.361377 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced430a8-7031-48a6-a86e-e827ef13b166" containerName="neutron-db-sync" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.361384 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced430a8-7031-48a6-a86e-e827ef13b166" containerName="neutron-db-sync" Sep 29 09:47:37 crc kubenswrapper[4779]: E0929 09:47:37.361398 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5406545d-6af2-453b-bad1-c5d0d58521ff" containerName="dnsmasq-dns" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.361404 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5406545d-6af2-453b-bad1-c5d0d58521ff" containerName="dnsmasq-dns" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.361553 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ced430a8-7031-48a6-a86e-e827ef13b166" containerName="neutron-db-sync" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.361574 4779 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="5406545d-6af2-453b-bad1-c5d0d58521ff" containerName="dnsmasq-dns" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.362512 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.448061 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-dns-svc\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.448136 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-config\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.448357 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smd7n\" (UniqueName: \"kubernetes.io/projected/ab1c8bfe-eb10-4813-87dc-0df1ea736205-kube-api-access-smd7n\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.448417 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-nb\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.448463 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-sb\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.489197 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77d9688d49-649qz"] Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.535450 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7fb8cfdfb-ff7cr"] Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.542955 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7fb8cfdfb-ff7cr"] Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.543054 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.557531 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.557721 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.557977 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-wtz7n" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.558097 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.559121 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smd7n\" (UniqueName: \"kubernetes.io/projected/ab1c8bfe-eb10-4813-87dc-0df1ea736205-kube-api-access-smd7n\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.559153 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-nb\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.559178 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-sb\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.559210 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-dns-svc\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.559237 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-config\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.560474 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-config\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.561535 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-nb\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.562189 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-dns-svc\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.562392 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-sb\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.641116 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smd7n\" (UniqueName: \"kubernetes.io/projected/ab1c8bfe-eb10-4813-87dc-0df1ea736205-kube-api-access-smd7n\") pod \"dnsmasq-dns-77d9688d49-649qz\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") " pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.662403 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztcqm\" (UniqueName: \"kubernetes.io/projected/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-kube-api-access-ztcqm\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.662730 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-httpd-config\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.662763 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-ovndb-tls-certs\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.662854 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-combined-ca-bundle\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.662937 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-config\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.766395 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.775454 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztcqm\" (UniqueName: \"kubernetes.io/projected/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-kube-api-access-ztcqm\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.775495 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-httpd-config\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.775522 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-ovndb-tls-certs\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.775589 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-combined-ca-bundle\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.775641 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-config\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.785547 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-ovndb-tls-certs\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.802631 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-combined-ca-bundle\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.818546 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-config\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.819044 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-httpd-config\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.833161 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ztcqm\" (UniqueName: \"kubernetes.io/projected/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-kube-api-access-ztcqm\") pod \"neutron-7fb8cfdfb-ff7cr\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") " pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.834815 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:37 crc kubenswrapper[4779]: I0929 09:47:37.984381 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:38 crc kubenswrapper[4779]: I0929 09:47:38.022389 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:38 crc kubenswrapper[4779]: I0929 09:47:38.222268 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9","Type":"ContainerStarted","Data":"c6ea388a643f9de2f4b9855e3c2dfe0e276442b9d53c91df455fb0b2085b6457"} Sep 29 09:47:38 crc kubenswrapper[4779]: I0929 09:47:38.409908 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-67b68d58cd-fsdcs" Sep 29 09:47:38 crc kubenswrapper[4779]: I0929 09:47:38.497310 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77d9688d49-649qz"] Sep 29 09:47:38 crc kubenswrapper[4779]: I0929 09:47:38.540218 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:39 crc kubenswrapper[4779]: W0929 09:47:39.143356 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1f1c031_af73_4248_b0ee_6c9a8e6c2a82.slice/crio-b3a4c1e024e4f8d359230221762e71495357df9efd56905b1a6f4405d499dcae WatchSource:0}: Error finding container b3a4c1e024e4f8d359230221762e71495357df9efd56905b1a6f4405d499dcae: Status 404 returned error can't find the container with id b3a4c1e024e4f8d359230221762e71495357df9efd56905b1a6f4405d499dcae Sep 29 09:47:39 crc kubenswrapper[4779]: I0929 09:47:39.146018 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7fb8cfdfb-ff7cr"] Sep 29 09:47:39 crc kubenswrapper[4779]: I0929 09:47:39.262479 4779 generic.go:334] "Generic (PLEG): container finished" podID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" containerID="211259e34db11dd31d7e7753dca18f0ebf98d95f5551ab977a8a8322c6a848ec" exitCode=0 Sep 29 09:47:39 crc kubenswrapper[4779]: I0929 09:47:39.262789 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d9688d49-649qz" event={"ID":"ab1c8bfe-eb10-4813-87dc-0df1ea736205","Type":"ContainerDied","Data":"211259e34db11dd31d7e7753dca18f0ebf98d95f5551ab977a8a8322c6a848ec"} Sep 29 09:47:39 crc kubenswrapper[4779]: I0929 09:47:39.262817 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d9688d49-649qz" event={"ID":"ab1c8bfe-eb10-4813-87dc-0df1ea736205","Type":"ContainerStarted","Data":"2978623c6571cde712c0edb24a0d6df1d39db6eb2d22a744dfaf8785b1808dc6"} Sep 29 09:47:39 crc kubenswrapper[4779]: I0929 09:47:39.285510 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8cfdfb-ff7cr" event={"ID":"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82","Type":"ContainerStarted","Data":"b3a4c1e024e4f8d359230221762e71495357df9efd56905b1a6f4405d499dcae"} Sep 29 09:47:39 crc 
kubenswrapper[4779]: I0929 09:47:39.300763 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d65424b8-83a8-44d3-8a99-c2dfdd5de5c9","Type":"ContainerStarted","Data":"a1c482c2f973eaba6b4e04fe85ae774ae3ca66eff7e846f651ea27c31b4f2523"} Sep 29 09:47:39 crc kubenswrapper[4779]: I0929 09:47:39.333192 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.333174617 podStartE2EDuration="4.333174617s" podCreationTimestamp="2025-09-29 09:47:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:39.316869257 +0000 UTC m=+1091.298193171" watchObservedRunningTime="2025-09-29 09:47:39.333174617 +0000 UTC m=+1091.314498521" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.312293 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8cfdfb-ff7cr" event={"ID":"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82","Type":"ContainerStarted","Data":"eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934"} Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.312626 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8cfdfb-ff7cr" event={"ID":"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82","Type":"ContainerStarted","Data":"87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de"} Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.313109 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.320933 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d9688d49-649qz" event={"ID":"ab1c8bfe-eb10-4813-87dc-0df1ea736205","Type":"ContainerStarted","Data":"8dc7293ff6c7eb5e144eaa1c94f807f49d8892149ac27496adbd77f5afa9c592"} Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.320968 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.342224 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7fb8cfdfb-ff7cr" podStartSLOduration=3.342197904 podStartE2EDuration="3.342197904s" podCreationTimestamp="2025-09-29 09:47:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:40.333508623 +0000 UTC m=+1092.314832527" watchObservedRunningTime="2025-09-29 09:47:40.342197904 +0000 UTC m=+1092.323521808" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.359293 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77d9688d49-649qz" podStartSLOduration=3.359271586 podStartE2EDuration="3.359271586s" podCreationTimestamp="2025-09-29 09:47:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:40.353748257 +0000 UTC m=+1092.335072171" watchObservedRunningTime="2025-09-29 09:47:40.359271586 +0000 UTC m=+1092.340595490" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.656203 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.764152 4779 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/neutron-68487ccd79-zvm96"] Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.765704 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.767957 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.768451 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.775707 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-68487ccd79-zvm96"] Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.846156 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-httpd-config\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.846236 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-public-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.846264 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-config\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.846295 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-ovndb-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.846311 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m97z\" (UniqueName: \"kubernetes.io/projected/cc248e32-59fd-44c1-ab42-636f4a81a203-kube-api-access-4m97z\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.846333 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-internal-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.846360 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-combined-ca-bundle\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc 
kubenswrapper[4779]: I0929 09:47:40.948186 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-ovndb-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.948228 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m97z\" (UniqueName: \"kubernetes.io/projected/cc248e32-59fd-44c1-ab42-636f4a81a203-kube-api-access-4m97z\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.948265 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-internal-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.948294 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-combined-ca-bundle\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.948357 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-httpd-config\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.948413 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-public-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.948436 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-config\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.956067 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-public-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.957484 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-httpd-config\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.957990 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-ovndb-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.958754 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-internal-tls-certs\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.963635 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-config\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.966805 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m97z\" (UniqueName: \"kubernetes.io/projected/cc248e32-59fd-44c1-ab42-636f4a81a203-kube-api-access-4m97z\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:40 crc kubenswrapper[4779]: I0929 09:47:40.970710 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc248e32-59fd-44c1-ab42-636f4a81a203-combined-ca-bundle\") pod \"neutron-68487ccd79-zvm96\" (UID: \"cc248e32-59fd-44c1-ab42-636f4a81a203\") " pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:41 crc kubenswrapper[4779]: I0929 09:47:41.109226 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:41 crc kubenswrapper[4779]: I0929 09:47:41.581691 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:41 crc kubenswrapper[4779]: I0929 09:47:41.626380 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7fd47db598-vbs8p" Sep 29 09:47:41 crc kubenswrapper[4779]: I0929 09:47:41.683826 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-77d4c87646-5g4ds"] Sep 29 09:47:41 crc kubenswrapper[4779]: I0929 09:47:41.684097 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-77d4c87646-5g4ds" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerName="barbican-api-log" containerID="cri-o://c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4" gracePeriod=30 Sep 29 09:47:41 crc kubenswrapper[4779]: I0929 09:47:41.684579 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-77d4c87646-5g4ds" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerName="barbican-api" containerID="cri-o://ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8" gracePeriod=30 Sep 29 09:47:41 crc kubenswrapper[4779]: I0929 09:47:41.880545 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-68487ccd79-zvm96"] Sep 29 09:47:41 crc kubenswrapper[4779]: W0929 09:47:41.904121 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc248e32_59fd_44c1_ab42_636f4a81a203.slice/crio-aa137bd2f1d44cb8cc655860bf976cf86a73ac1f74e94b17e8d47dfdf557b789 WatchSource:0}: Error finding container aa137bd2f1d44cb8cc655860bf976cf86a73ac1f74e94b17e8d47dfdf557b789: Status 404 returned error can't find the container with id aa137bd2f1d44cb8cc655860bf976cf86a73ac1f74e94b17e8d47dfdf557b789 Sep 29 09:47:42 crc kubenswrapper[4779]: I0929 09:47:42.374107 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68487ccd79-zvm96" event={"ID":"cc248e32-59fd-44c1-ab42-636f4a81a203","Type":"ContainerStarted","Data":"8683913a52777b8e26c7d800ccd31a16030d76483ffcc2c78982eb77b455fc25"} Sep 29 09:47:42 crc kubenswrapper[4779]: I0929 09:47:42.374430 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68487ccd79-zvm96" event={"ID":"cc248e32-59fd-44c1-ab42-636f4a81a203","Type":"ContainerStarted","Data":"aa137bd2f1d44cb8cc655860bf976cf86a73ac1f74e94b17e8d47dfdf557b789"} Sep 29 09:47:42 crc kubenswrapper[4779]: I0929 09:47:42.392523 4779 generic.go:334] "Generic (PLEG): container finished" podID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerID="c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4" exitCode=143 Sep 29 09:47:42 crc kubenswrapper[4779]: I0929 09:47:42.392706 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77d4c87646-5g4ds" event={"ID":"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45","Type":"ContainerDied","Data":"c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4"} Sep 29 09:47:43 crc kubenswrapper[4779]: I0929 09:47:43.363547 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 29 09:47:43 crc kubenswrapper[4779]: I0929 09:47:43.428942 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68487ccd79-zvm96" 
event={"ID":"cc248e32-59fd-44c1-ab42-636f4a81a203","Type":"ContainerStarted","Data":"dd0ef0e1a3faceb7ca41f6c39f247c064c5c8187f265bb9f919a3b4eaf4b0b24"} Sep 29 09:47:43 crc kubenswrapper[4779]: I0929 09:47:43.429500 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:47:43 crc kubenswrapper[4779]: I0929 09:47:43.461749 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-68487ccd79-zvm96" podStartSLOduration=3.461724594 podStartE2EDuration="3.461724594s" podCreationTimestamp="2025-09-29 09:47:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:47:43.457499563 +0000 UTC m=+1095.438823487" watchObservedRunningTime="2025-09-29 09:47:43.461724594 +0000 UTC m=+1095.443048498" Sep 29 09:47:43 crc kubenswrapper[4779]: I0929 09:47:43.916726 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.013701 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-combined-ca-bundle\") pod \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.014239 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6rb8\" (UniqueName: \"kubernetes.io/projected/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-kube-api-access-m6rb8\") pod \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.014342 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data\") pod \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.014612 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data-custom\") pod \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.016042 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-logs\") pod \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\" (UID: \"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45\") " Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.017583 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-logs" (OuterVolumeSpecName: "logs") pod "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" (UID: "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.023076 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" (UID: "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.026301 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-kube-api-access-m6rb8" (OuterVolumeSpecName: "kube-api-access-m6rb8") pod "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" (UID: "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45"). InnerVolumeSpecName "kube-api-access-m6rb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.052991 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" (UID: "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.093095 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data" (OuterVolumeSpecName: "config-data") pod "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" (UID: "cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.119408 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.119448 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6rb8\" (UniqueName: \"kubernetes.io/projected/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-kube-api-access-m6rb8\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.119466 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.119476 4779 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.119489 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.291960 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.292016 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.292756 4779 scope.go:117] "RemoveContainer" containerID="57d6ed8fa30263cb34a23159118310a096e08b89d83acaef3afa9ecc7a1a48e3" Sep 29 09:47:44 crc kubenswrapper[4779]: E0929 09:47:44.293208 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.438803 4779 generic.go:334] "Generic (PLEG): container finished" podID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerID="ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8" exitCode=0 Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.439712 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-77d4c87646-5g4ds" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.450693 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77d4c87646-5g4ds" event={"ID":"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45","Type":"ContainerDied","Data":"ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8"} Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.450745 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77d4c87646-5g4ds" event={"ID":"cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45","Type":"ContainerDied","Data":"20d7b637393245d57df389a420c8e1305707d935c7441aed32e2b96b2ce32fb9"} Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.450770 4779 scope.go:117] "RemoveContainer" containerID="ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.478718 4779 scope.go:117] "RemoveContainer" containerID="c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.482630 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-77d4c87646-5g4ds"] Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.493676 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-77d4c87646-5g4ds"] Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.503348 4779 scope.go:117] "RemoveContainer" containerID="ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8" Sep 29 09:47:44 crc kubenswrapper[4779]: E0929 09:47:44.503699 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8\": container with ID starting with ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8 not found: ID does not exist" containerID="ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.503736 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8"} err="failed to get container status \"ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8\": rpc error: code = NotFound desc = could not find container \"ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8\": container with ID starting with ca53cbfe52044307f1b95d50744c6d4dc93502387524bd699c8fca021f4cdbb8 not found: ID does not exist" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.503763 4779 scope.go:117] "RemoveContainer" containerID="c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4" Sep 29 09:47:44 crc kubenswrapper[4779]: E0929 09:47:44.504005 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4\": container with ID starting with c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4 not found: ID does not exist" containerID="c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.504027 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4"} err="failed to get container status 
\"c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4\": rpc error: code = NotFound desc = could not find container \"c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4\": container with ID starting with c1374c457de1faba919074fd86fb446b0819c3f1acc863d7015c2170fe8701a4 not found: ID does not exist" Sep 29 09:47:44 crc kubenswrapper[4779]: I0929 09:47:44.726416 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" path="/var/lib/kubelet/pods/cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45/volumes" Sep 29 09:47:45 crc kubenswrapper[4779]: I0929 09:47:45.814852 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 09:47:46 crc kubenswrapper[4779]: I0929 09:47:46.222975 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-864d875667-zb59s" Sep 29 09:47:47 crc kubenswrapper[4779]: I0929 09:47:47.768091 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:47:47 crc kubenswrapper[4779]: I0929 09:47:47.821348 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-646f8b549-z5mvs"] Sep 29 09:47:47 crc kubenswrapper[4779]: I0929 09:47:47.821553 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" podUID="30b112ba-d459-4229-a0d5-a327cdf7d39a" containerName="dnsmasq-dns" containerID="cri-o://797dbaa177f801ec5b0bc4880bb48c6bc51054477e09eed659029962b71a0084" gracePeriod=10 Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.507556 4779 generic.go:334] "Generic (PLEG): container finished" podID="30b112ba-d459-4229-a0d5-a327cdf7d39a" containerID="797dbaa177f801ec5b0bc4880bb48c6bc51054477e09eed659029962b71a0084" exitCode=0 Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.507796 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" event={"ID":"30b112ba-d459-4229-a0d5-a327cdf7d39a","Type":"ContainerDied","Data":"797dbaa177f801ec5b0bc4880bb48c6bc51054477e09eed659029962b71a0084"} Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.507947 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" event={"ID":"30b112ba-d459-4229-a0d5-a327cdf7d39a","Type":"ContainerDied","Data":"110aa42efb22aa4994c62725b4678e7d5dd58fce9a18d685506905c834046e67"} Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.507971 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="110aa42efb22aa4994c62725b4678e7d5dd58fce9a18d685506905c834046e67" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.557815 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.704075 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-dns-svc\") pod \"30b112ba-d459-4229-a0d5-a327cdf7d39a\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.704181 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-nb\") pod \"30b112ba-d459-4229-a0d5-a327cdf7d39a\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.704354 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-sb\") pod \"30b112ba-d459-4229-a0d5-a327cdf7d39a\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.704389 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-config\") pod \"30b112ba-d459-4229-a0d5-a327cdf7d39a\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.704496 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j49nf\" (UniqueName: \"kubernetes.io/projected/30b112ba-d459-4229-a0d5-a327cdf7d39a-kube-api-access-j49nf\") pod \"30b112ba-d459-4229-a0d5-a327cdf7d39a\" (UID: \"30b112ba-d459-4229-a0d5-a327cdf7d39a\") " Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.710737 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30b112ba-d459-4229-a0d5-a327cdf7d39a-kube-api-access-j49nf" (OuterVolumeSpecName: "kube-api-access-j49nf") pod "30b112ba-d459-4229-a0d5-a327cdf7d39a" (UID: "30b112ba-d459-4229-a0d5-a327cdf7d39a"). InnerVolumeSpecName "kube-api-access-j49nf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.754644 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "30b112ba-d459-4229-a0d5-a327cdf7d39a" (UID: "30b112ba-d459-4229-a0d5-a327cdf7d39a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.768269 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "30b112ba-d459-4229-a0d5-a327cdf7d39a" (UID: "30b112ba-d459-4229-a0d5-a327cdf7d39a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.775658 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "30b112ba-d459-4229-a0d5-a327cdf7d39a" (UID: "30b112ba-d459-4229-a0d5-a327cdf7d39a"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.781779 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-config" (OuterVolumeSpecName: "config") pod "30b112ba-d459-4229-a0d5-a327cdf7d39a" (UID: "30b112ba-d459-4229-a0d5-a327cdf7d39a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.809092 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.809396 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.809409 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j49nf\" (UniqueName: \"kubernetes.io/projected/30b112ba-d459-4229-a0d5-a327cdf7d39a-kube-api-access-j49nf\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.809426 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:48 crc kubenswrapper[4779]: I0929 09:47:48.809437 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30b112ba-d459-4229-a0d5-a327cdf7d39a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:47:49 crc kubenswrapper[4779]: I0929 09:47:49.515132 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-646f8b549-z5mvs" Sep 29 09:47:49 crc kubenswrapper[4779]: I0929 09:47:49.562468 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-646f8b549-z5mvs"] Sep 29 09:47:49 crc kubenswrapper[4779]: I0929 09:47:49.577623 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-646f8b549-z5mvs"] Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.166777 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 09:47:50 crc kubenswrapper[4779]: E0929 09:47:50.168521 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerName="barbican-api" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.168640 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerName="barbican-api" Sep 29 09:47:50 crc kubenswrapper[4779]: E0929 09:47:50.168742 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerName="barbican-api-log" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.168819 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerName="barbican-api-log" Sep 29 09:47:50 crc kubenswrapper[4779]: E0929 09:47:50.168942 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b112ba-d459-4229-a0d5-a327cdf7d39a" containerName="init" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.169028 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b112ba-d459-4229-a0d5-a327cdf7d39a" containerName="init" Sep 29 09:47:50 crc kubenswrapper[4779]: E0929 09:47:50.169124 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b112ba-d459-4229-a0d5-a327cdf7d39a" containerName="dnsmasq-dns" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.169200 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b112ba-d459-4229-a0d5-a327cdf7d39a" containerName="dnsmasq-dns" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.169503 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="30b112ba-d459-4229-a0d5-a327cdf7d39a" containerName="dnsmasq-dns" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.169600 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerName="barbican-api" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.169687 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd8d2bbf-a1bd-4bc6-8212-91eceefdfb45" containerName="barbican-api-log" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.170777 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.174803 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.175260 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.175546 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-pngsj" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.183086 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.234570 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bksj\" (UniqueName: \"kubernetes.io/projected/ad6adeb1-9606-4cd7-bc31-2ed062ade161-kube-api-access-2bksj\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.234622 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ad6adeb1-9606-4cd7-bc31-2ed062ade161-openstack-config-secret\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.234655 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad6adeb1-9606-4cd7-bc31-2ed062ade161-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.234673 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ad6adeb1-9606-4cd7-bc31-2ed062ade161-openstack-config\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.336629 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bksj\" (UniqueName: \"kubernetes.io/projected/ad6adeb1-9606-4cd7-bc31-2ed062ade161-kube-api-access-2bksj\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.336676 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ad6adeb1-9606-4cd7-bc31-2ed062ade161-openstack-config-secret\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.336710 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad6adeb1-9606-4cd7-bc31-2ed062ade161-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.336736 4779 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ad6adeb1-9606-4cd7-bc31-2ed062ade161-openstack-config\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.337761 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ad6adeb1-9606-4cd7-bc31-2ed062ade161-openstack-config\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.342432 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad6adeb1-9606-4cd7-bc31-2ed062ade161-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.343473 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ad6adeb1-9606-4cd7-bc31-2ed062ade161-openstack-config-secret\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.356501 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bksj\" (UniqueName: \"kubernetes.io/projected/ad6adeb1-9606-4cd7-bc31-2ed062ade161-kube-api-access-2bksj\") pod \"openstackclient\" (UID: \"ad6adeb1-9606-4cd7-bc31-2ed062ade161\") " pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.487743 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.733851 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30b112ba-d459-4229-a0d5-a327cdf7d39a" path="/var/lib/kubelet/pods/30b112ba-d459-4229-a0d5-a327cdf7d39a/volumes" Sep 29 09:47:50 crc kubenswrapper[4779]: I0929 09:47:50.967932 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 09:47:50 crc kubenswrapper[4779]: W0929 09:47:50.969540 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad6adeb1_9606_4cd7_bc31_2ed062ade161.slice/crio-8d2fd063d2a3c8d9cbd1e8bd50709b97af40d7a091fed3bed92fa03678e7a825 WatchSource:0}: Error finding container 8d2fd063d2a3c8d9cbd1e8bd50709b97af40d7a091fed3bed92fa03678e7a825: Status 404 returned error can't find the container with id 8d2fd063d2a3c8d9cbd1e8bd50709b97af40d7a091fed3bed92fa03678e7a825 Sep 29 09:47:51 crc kubenswrapper[4779]: I0929 09:47:51.535285 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ad6adeb1-9606-4cd7-bc31-2ed062ade161","Type":"ContainerStarted","Data":"8d2fd063d2a3c8d9cbd1e8bd50709b97af40d7a091fed3bed92fa03678e7a825"} Sep 29 09:47:55 crc kubenswrapper[4779]: I0929 09:47:55.716137 4779 scope.go:117] "RemoveContainer" containerID="57d6ed8fa30263cb34a23159118310a096e08b89d83acaef3afa9ecc7a1a48e3" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.193510 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-j9cwn"] Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.195958 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-j9cwn" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.217198 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-j9cwn"] Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.266381 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-czmrh"] Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.268084 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-czmrh" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.281817 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-czmrh"] Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.287915 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qsb8\" (UniqueName: \"kubernetes.io/projected/18171c09-ccc7-45fd-955c-b3dc2c7eff55-kube-api-access-9qsb8\") pod \"nova-api-db-create-j9cwn\" (UID: \"18171c09-ccc7-45fd-955c-b3dc2c7eff55\") " pod="openstack/nova-api-db-create-j9cwn" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.389656 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qsb8\" (UniqueName: \"kubernetes.io/projected/18171c09-ccc7-45fd-955c-b3dc2c7eff55-kube-api-access-9qsb8\") pod \"nova-api-db-create-j9cwn\" (UID: \"18171c09-ccc7-45fd-955c-b3dc2c7eff55\") " pod="openstack/nova-api-db-create-j9cwn" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.389803 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dbct\" (UniqueName: \"kubernetes.io/projected/4f48c0b6-5c74-4587-b8b5-47e7681dd657-kube-api-access-6dbct\") pod \"nova-cell0-db-create-czmrh\" (UID: \"4f48c0b6-5c74-4587-b8b5-47e7681dd657\") " pod="openstack/nova-cell0-db-create-czmrh" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.410891 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qsb8\" (UniqueName: \"kubernetes.io/projected/18171c09-ccc7-45fd-955c-b3dc2c7eff55-kube-api-access-9qsb8\") pod \"nova-api-db-create-j9cwn\" (UID: \"18171c09-ccc7-45fd-955c-b3dc2c7eff55\") " pod="openstack/nova-api-db-create-j9cwn" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.467264 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-8rht4"] Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.468784 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8rht4" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.490868 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-8rht4"] Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.491043 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dbct\" (UniqueName: \"kubernetes.io/projected/4f48c0b6-5c74-4587-b8b5-47e7681dd657-kube-api-access-6dbct\") pod \"nova-cell0-db-create-czmrh\" (UID: \"4f48c0b6-5c74-4587-b8b5-47e7681dd657\") " pod="openstack/nova-cell0-db-create-czmrh" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.517418 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dbct\" (UniqueName: \"kubernetes.io/projected/4f48c0b6-5c74-4587-b8b5-47e7681dd657-kube-api-access-6dbct\") pod \"nova-cell0-db-create-czmrh\" (UID: \"4f48c0b6-5c74-4587-b8b5-47e7681dd657\") " pod="openstack/nova-cell0-db-create-czmrh" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.523311 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-j9cwn" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.590964 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-czmrh" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.592894 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57fmw\" (UniqueName: \"kubernetes.io/projected/a9163f19-a7b2-4128-bcd9-e37cd9ff7782-kube-api-access-57fmw\") pod \"nova-cell1-db-create-8rht4\" (UID: \"a9163f19-a7b2-4128-bcd9-e37cd9ff7782\") " pod="openstack/nova-cell1-db-create-8rht4" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.694196 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57fmw\" (UniqueName: \"kubernetes.io/projected/a9163f19-a7b2-4128-bcd9-e37cd9ff7782-kube-api-access-57fmw\") pod \"nova-cell1-db-create-8rht4\" (UID: \"a9163f19-a7b2-4128-bcd9-e37cd9ff7782\") " pod="openstack/nova-cell1-db-create-8rht4" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.727730 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57fmw\" (UniqueName: \"kubernetes.io/projected/a9163f19-a7b2-4128-bcd9-e37cd9ff7782-kube-api-access-57fmw\") pod \"nova-cell1-db-create-8rht4\" (UID: \"a9163f19-a7b2-4128-bcd9-e37cd9ff7782\") " pod="openstack/nova-cell1-db-create-8rht4" Sep 29 09:47:58 crc kubenswrapper[4779]: I0929 09:47:58.794260 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8rht4" Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.091361 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-j9cwn"] Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.269357 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-czmrh"] Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.361129 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-8rht4"] Sep 29 09:47:59 crc kubenswrapper[4779]: W0929 09:47:59.370951 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9163f19_a7b2_4128_bcd9_e37cd9ff7782.slice/crio-b68f71b48087bd7c60aea6a49c147fe19f0a8b688e49d6c21f7f9c2b8f0aefb9 WatchSource:0}: Error finding container b68f71b48087bd7c60aea6a49c147fe19f0a8b688e49d6c21f7f9c2b8f0aefb9: Status 404 returned error can't find the container with id b68f71b48087bd7c60aea6a49c147fe19f0a8b688e49d6c21f7f9c2b8f0aefb9 Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.564437 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.565052 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="ceilometer-central-agent" containerID="cri-o://e132c7884661a71449a8ca42797b2fcc7c921ab27e35e1294492aabf7d6af489" gracePeriod=30 Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.565093 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="proxy-httpd" containerID="cri-o://9ecb8ba733adbb9319cc45d5b0664bfa8e1d1a9c530ef2da2a257366e545445e" gracePeriod=30 Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.565187 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" 
containerName="ceilometer-notification-agent" containerID="cri-o://8ee03d0fb3f77380679450859183037f4b24ad48cbf1272456089a76f2a92df9" gracePeriod=30 Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.565074 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="sg-core" containerID="cri-o://4fe0a80938eae031d5916629d48b0f1da3935242de12693ce390ac59b8bb30e0" gracePeriod=30 Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.576582 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.169:3000/\": EOF" Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.608924 4779 generic.go:334] "Generic (PLEG): container finished" podID="4f48c0b6-5c74-4587-b8b5-47e7681dd657" containerID="4ea8f59e35b1f420e76dc43cd662b6ac1390288dc40631948ed056bc890a5c98" exitCode=0 Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.608975 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-czmrh" event={"ID":"4f48c0b6-5c74-4587-b8b5-47e7681dd657","Type":"ContainerDied","Data":"4ea8f59e35b1f420e76dc43cd662b6ac1390288dc40631948ed056bc890a5c98"} Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.608999 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-czmrh" event={"ID":"4f48c0b6-5c74-4587-b8b5-47e7681dd657","Type":"ContainerStarted","Data":"f40ff459a8959c759a1d9a491eae2717a3777769848a986b463bb35406012493"} Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.610540 4779 generic.go:334] "Generic (PLEG): container finished" podID="18171c09-ccc7-45fd-955c-b3dc2c7eff55" containerID="128377e4bc174d0c0bb389eadf07dc574fb2c01709164303e64993b254a70634" exitCode=0 Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.610579 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-j9cwn" event={"ID":"18171c09-ccc7-45fd-955c-b3dc2c7eff55","Type":"ContainerDied","Data":"128377e4bc174d0c0bb389eadf07dc574fb2c01709164303e64993b254a70634"} Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.610595 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-j9cwn" event={"ID":"18171c09-ccc7-45fd-955c-b3dc2c7eff55","Type":"ContainerStarted","Data":"869de735cfd0b1898e41c3b73503707a690320f5f5a28f9208c4cff1bd2718e9"} Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.613242 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ad6adeb1-9606-4cd7-bc31-2ed062ade161","Type":"ContainerStarted","Data":"e487bf4262f52500fe480fc458faf9dd46ad584bc73e441e813cc43c6b245a22"} Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.615654 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerStarted","Data":"04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3"} Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.618098 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8rht4" event={"ID":"a9163f19-a7b2-4128-bcd9-e37cd9ff7782","Type":"ContainerStarted","Data":"b68f71b48087bd7c60aea6a49c147fe19f0a8b688e49d6c21f7f9c2b8f0aefb9"} Sep 29 09:47:59 crc kubenswrapper[4779]: I0929 09:47:59.673591 4779 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.981659512 podStartE2EDuration="9.673574183s" podCreationTimestamp="2025-09-29 09:47:50 +0000 UTC" firstStartedPulling="2025-09-29 09:47:50.971684735 +0000 UTC m=+1102.953008639" lastFinishedPulling="2025-09-29 09:47:58.663599406 +0000 UTC m=+1110.644923310" observedRunningTime="2025-09-29 09:47:59.672358978 +0000 UTC m=+1111.653682882" watchObservedRunningTime="2025-09-29 09:47:59.673574183 +0000 UTC m=+1111.654898087" Sep 29 09:48:00 crc kubenswrapper[4779]: I0929 09:48:00.629729 4779 generic.go:334] "Generic (PLEG): container finished" podID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerID="9ecb8ba733adbb9319cc45d5b0664bfa8e1d1a9c530ef2da2a257366e545445e" exitCode=0 Sep 29 09:48:00 crc kubenswrapper[4779]: I0929 09:48:00.630084 4779 generic.go:334] "Generic (PLEG): container finished" podID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerID="4fe0a80938eae031d5916629d48b0f1da3935242de12693ce390ac59b8bb30e0" exitCode=2 Sep 29 09:48:00 crc kubenswrapper[4779]: I0929 09:48:00.630094 4779 generic.go:334] "Generic (PLEG): container finished" podID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerID="e132c7884661a71449a8ca42797b2fcc7c921ab27e35e1294492aabf7d6af489" exitCode=0 Sep 29 09:48:00 crc kubenswrapper[4779]: I0929 09:48:00.630146 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerDied","Data":"9ecb8ba733adbb9319cc45d5b0664bfa8e1d1a9c530ef2da2a257366e545445e"} Sep 29 09:48:00 crc kubenswrapper[4779]: I0929 09:48:00.630172 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerDied","Data":"4fe0a80938eae031d5916629d48b0f1da3935242de12693ce390ac59b8bb30e0"} Sep 29 09:48:00 crc kubenswrapper[4779]: I0929 09:48:00.630180 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerDied","Data":"e132c7884661a71449a8ca42797b2fcc7c921ab27e35e1294492aabf7d6af489"} Sep 29 09:48:00 crc kubenswrapper[4779]: I0929 09:48:00.632030 4779 generic.go:334] "Generic (PLEG): container finished" podID="a9163f19-a7b2-4128-bcd9-e37cd9ff7782" containerID="1950c9fb9a81ebcb3bf6701a0cd72136a49bca222f173ffd526ab1e05a9d9b1a" exitCode=0 Sep 29 09:48:00 crc kubenswrapper[4779]: I0929 09:48:00.632103 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8rht4" event={"ID":"a9163f19-a7b2-4128-bcd9-e37cd9ff7782","Type":"ContainerDied","Data":"1950c9fb9a81ebcb3bf6701a0cd72136a49bca222f173ffd526ab1e05a9d9b1a"} Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.167734 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-czmrh" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.177496 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-j9cwn" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.250621 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dbct\" (UniqueName: \"kubernetes.io/projected/4f48c0b6-5c74-4587-b8b5-47e7681dd657-kube-api-access-6dbct\") pod \"4f48c0b6-5c74-4587-b8b5-47e7681dd657\" (UID: \"4f48c0b6-5c74-4587-b8b5-47e7681dd657\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.250687 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qsb8\" (UniqueName: \"kubernetes.io/projected/18171c09-ccc7-45fd-955c-b3dc2c7eff55-kube-api-access-9qsb8\") pod \"18171c09-ccc7-45fd-955c-b3dc2c7eff55\" (UID: \"18171c09-ccc7-45fd-955c-b3dc2c7eff55\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.266167 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f48c0b6-5c74-4587-b8b5-47e7681dd657-kube-api-access-6dbct" (OuterVolumeSpecName: "kube-api-access-6dbct") pod "4f48c0b6-5c74-4587-b8b5-47e7681dd657" (UID: "4f48c0b6-5c74-4587-b8b5-47e7681dd657"). InnerVolumeSpecName "kube-api-access-6dbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.266362 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18171c09-ccc7-45fd-955c-b3dc2c7eff55-kube-api-access-9qsb8" (OuterVolumeSpecName: "kube-api-access-9qsb8") pod "18171c09-ccc7-45fd-955c-b3dc2c7eff55" (UID: "18171c09-ccc7-45fd-955c-b3dc2c7eff55"). InnerVolumeSpecName "kube-api-access-9qsb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.353021 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dbct\" (UniqueName: \"kubernetes.io/projected/4f48c0b6-5c74-4587-b8b5-47e7681dd657-kube-api-access-6dbct\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.353263 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qsb8\" (UniqueName: \"kubernetes.io/projected/18171c09-ccc7-45fd-955c-b3dc2c7eff55-kube-api-access-9qsb8\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.644299 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-czmrh" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.645033 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-czmrh" event={"ID":"4f48c0b6-5c74-4587-b8b5-47e7681dd657","Type":"ContainerDied","Data":"f40ff459a8959c759a1d9a491eae2717a3777769848a986b463bb35406012493"} Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.645078 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f40ff459a8959c759a1d9a491eae2717a3777769848a986b463bb35406012493" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.646948 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-j9cwn" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.646957 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-j9cwn" event={"ID":"18171c09-ccc7-45fd-955c-b3dc2c7eff55","Type":"ContainerDied","Data":"869de735cfd0b1898e41c3b73503707a690320f5f5a28f9208c4cff1bd2718e9"} Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.647025 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="869de735cfd0b1898e41c3b73503707a690320f5f5a28f9208c4cff1bd2718e9" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.651716 4779 generic.go:334] "Generic (PLEG): container finished" podID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerID="8ee03d0fb3f77380679450859183037f4b24ad48cbf1272456089a76f2a92df9" exitCode=0 Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.651876 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerDied","Data":"8ee03d0fb3f77380679450859183037f4b24ad48cbf1272456089a76f2a92df9"} Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.834003 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.962989 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbhgt\" (UniqueName: \"kubernetes.io/projected/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-kube-api-access-kbhgt\") pod \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.963074 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-config-data\") pod \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.963141 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-run-httpd\") pod \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.963195 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-sg-core-conf-yaml\") pod \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.963271 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-scripts\") pod \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.963333 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-log-httpd\") pod \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.963386 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-combined-ca-bundle\") pod \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\" (UID: \"c32af95a-c3b1-4cba-9c05-d5d787e3ec04\") " Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.963854 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c32af95a-c3b1-4cba-9c05-d5d787e3ec04" (UID: "c32af95a-c3b1-4cba-9c05-d5d787e3ec04"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.965653 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c32af95a-c3b1-4cba-9c05-d5d787e3ec04" (UID: "c32af95a-c3b1-4cba-9c05-d5d787e3ec04"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.967729 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-kube-api-access-kbhgt" (OuterVolumeSpecName: "kube-api-access-kbhgt") pod "c32af95a-c3b1-4cba-9c05-d5d787e3ec04" (UID: "c32af95a-c3b1-4cba-9c05-d5d787e3ec04"). InnerVolumeSpecName "kube-api-access-kbhgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:01 crc kubenswrapper[4779]: I0929 09:48:01.968103 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-scripts" (OuterVolumeSpecName: "scripts") pod "c32af95a-c3b1-4cba-9c05-d5d787e3ec04" (UID: "c32af95a-c3b1-4cba-9c05-d5d787e3ec04"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.002477 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8rht4" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.013735 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c32af95a-c3b1-4cba-9c05-d5d787e3ec04" (UID: "c32af95a-c3b1-4cba-9c05-d5d787e3ec04"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.080088 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57fmw\" (UniqueName: \"kubernetes.io/projected/a9163f19-a7b2-4128-bcd9-e37cd9ff7782-kube-api-access-57fmw\") pod \"a9163f19-a7b2-4128-bcd9-e37cd9ff7782\" (UID: \"a9163f19-a7b2-4128-bcd9-e37cd9ff7782\") " Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.080570 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbhgt\" (UniqueName: \"kubernetes.io/projected/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-kube-api-access-kbhgt\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.080590 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.080601 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.080611 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.080621 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.083892 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9163f19-a7b2-4128-bcd9-e37cd9ff7782-kube-api-access-57fmw" (OuterVolumeSpecName: "kube-api-access-57fmw") pod "a9163f19-a7b2-4128-bcd9-e37cd9ff7782" (UID: "a9163f19-a7b2-4128-bcd9-e37cd9ff7782"). InnerVolumeSpecName "kube-api-access-57fmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.091051 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-config-data" (OuterVolumeSpecName: "config-data") pod "c32af95a-c3b1-4cba-9c05-d5d787e3ec04" (UID: "c32af95a-c3b1-4cba-9c05-d5d787e3ec04"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.123066 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c32af95a-c3b1-4cba-9c05-d5d787e3ec04" (UID: "c32af95a-c3b1-4cba-9c05-d5d787e3ec04"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.182348 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.182379 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c32af95a-c3b1-4cba-9c05-d5d787e3ec04-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.182390 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57fmw\" (UniqueName: \"kubernetes.io/projected/a9163f19-a7b2-4128-bcd9-e37cd9ff7782-kube-api-access-57fmw\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.664605 4779 generic.go:334] "Generic (PLEG): container finished" podID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerID="04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3" exitCode=1 Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.664697 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerDied","Data":"04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3"} Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.664935 4779 scope.go:117] "RemoveContainer" containerID="57d6ed8fa30263cb34a23159118310a096e08b89d83acaef3afa9ecc7a1a48e3" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.666038 4779 scope.go:117] "RemoveContainer" containerID="04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3" Sep 29 09:48:02 crc kubenswrapper[4779]: E0929 09:48:02.666452 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.676306 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8rht4" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.676325 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8rht4" event={"ID":"a9163f19-a7b2-4128-bcd9-e37cd9ff7782","Type":"ContainerDied","Data":"b68f71b48087bd7c60aea6a49c147fe19f0a8b688e49d6c21f7f9c2b8f0aefb9"} Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.676379 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b68f71b48087bd7c60aea6a49c147fe19f0a8b688e49d6c21f7f9c2b8f0aefb9" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.688030 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c32af95a-c3b1-4cba-9c05-d5d787e3ec04","Type":"ContainerDied","Data":"765d444a85f29eca98436cf013dbb2cc977503d2e6173b64bec7cd5c634036cb"} Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.688150 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.770175 4779 scope.go:117] "RemoveContainer" containerID="9ecb8ba733adbb9319cc45d5b0664bfa8e1d1a9c530ef2da2a257366e545445e" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.780989 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.792434 4779 scope.go:117] "RemoveContainer" containerID="4fe0a80938eae031d5916629d48b0f1da3935242de12693ce390ac59b8bb30e0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.792888 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.809071 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:02 crc kubenswrapper[4779]: E0929 09:48:02.810567 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="sg-core" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.810634 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="sg-core" Sep 29 09:48:02 crc kubenswrapper[4779]: E0929 09:48:02.810653 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f48c0b6-5c74-4587-b8b5-47e7681dd657" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.810661 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f48c0b6-5c74-4587-b8b5-47e7681dd657" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: E0929 09:48:02.810732 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="ceilometer-notification-agent" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.810742 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="ceilometer-notification-agent" Sep 29 09:48:02 crc kubenswrapper[4779]: E0929 09:48:02.810789 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9163f19-a7b2-4128-bcd9-e37cd9ff7782" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.810800 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9163f19-a7b2-4128-bcd9-e37cd9ff7782" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: E0929 09:48:02.810965 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18171c09-ccc7-45fd-955c-b3dc2c7eff55" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.810978 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="18171c09-ccc7-45fd-955c-b3dc2c7eff55" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: E0929 09:48:02.811035 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="ceilometer-central-agent" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.811046 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="ceilometer-central-agent" Sep 29 09:48:02 crc kubenswrapper[4779]: E0929 09:48:02.811066 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="proxy-httpd" Sep 29 09:48:02 crc kubenswrapper[4779]: 
I0929 09:48:02.811076 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="proxy-httpd" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.811497 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9163f19-a7b2-4128-bcd9-e37cd9ff7782" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.811549 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="ceilometer-notification-agent" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.811566 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f48c0b6-5c74-4587-b8b5-47e7681dd657" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.811585 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="sg-core" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.811620 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="18171c09-ccc7-45fd-955c-b3dc2c7eff55" containerName="mariadb-database-create" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.811669 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="ceilometer-central-agent" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.811711 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="proxy-httpd" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.814929 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.815048 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.817187 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.825996 4779 scope.go:117] "RemoveContainer" containerID="8ee03d0fb3f77380679450859183037f4b24ad48cbf1272456089a76f2a92df9" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.831140 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.852709 4779 scope.go:117] "RemoveContainer" containerID="e132c7884661a71449a8ca42797b2fcc7c921ab27e35e1294492aabf7d6af489" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.894419 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.894521 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmwr6\" (UniqueName: \"kubernetes.io/projected/5fa70755-5735-495e-af6b-b0da222f1ae6-kube-api-access-nmwr6\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.894867 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.894952 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-run-httpd\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.894971 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-config-data\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.895008 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-log-httpd\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.895159 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-scripts\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.996943 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.997045 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmwr6\" (UniqueName: \"kubernetes.io/projected/5fa70755-5735-495e-af6b-b0da222f1ae6-kube-api-access-nmwr6\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.997085 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.997150 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-run-httpd\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.997165 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-config-data\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.997194 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-log-httpd\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.997226 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-scripts\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.997685 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-run-httpd\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:02 crc kubenswrapper[4779]: I0929 09:48:02.998282 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-log-httpd\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.006280 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-scripts\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.007110 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.007919 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.017234 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.018081 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmwr6\" (UniqueName: \"kubernetes.io/projected/5fa70755-5735-495e-af6b-b0da222f1ae6-kube-api-access-nmwr6\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.015074 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-config-data\") pod \"ceilometer-0\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " pod="openstack/ceilometer-0" Sep 29 09:48:03 crc kubenswrapper[4779]: E0929 09:48:03.033447 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-data kube-api-access-nmwr6 sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="5fa70755-5735-495e-af6b-b0da222f1ae6" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.707436 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.723274 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.809165 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-run-httpd\") pod \"5fa70755-5735-495e-af6b-b0da222f1ae6\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.809208 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-sg-core-conf-yaml\") pod \"5fa70755-5735-495e-af6b-b0da222f1ae6\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.809293 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-scripts\") pod \"5fa70755-5735-495e-af6b-b0da222f1ae6\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.809336 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-log-httpd\") pod \"5fa70755-5735-495e-af6b-b0da222f1ae6\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.809378 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmwr6\" (UniqueName: \"kubernetes.io/projected/5fa70755-5735-495e-af6b-b0da222f1ae6-kube-api-access-nmwr6\") pod \"5fa70755-5735-495e-af6b-b0da222f1ae6\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.809433 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-config-data\") pod \"5fa70755-5735-495e-af6b-b0da222f1ae6\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.809481 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-combined-ca-bundle\") pod \"5fa70755-5735-495e-af6b-b0da222f1ae6\" (UID: \"5fa70755-5735-495e-af6b-b0da222f1ae6\") " Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.811359 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5fa70755-5735-495e-af6b-b0da222f1ae6" (UID: "5fa70755-5735-495e-af6b-b0da222f1ae6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.811958 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5fa70755-5735-495e-af6b-b0da222f1ae6" (UID: "5fa70755-5735-495e-af6b-b0da222f1ae6"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.814926 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-scripts" (OuterVolumeSpecName: "scripts") pod "5fa70755-5735-495e-af6b-b0da222f1ae6" (UID: "5fa70755-5735-495e-af6b-b0da222f1ae6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.817228 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5fa70755-5735-495e-af6b-b0da222f1ae6" (UID: "5fa70755-5735-495e-af6b-b0da222f1ae6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.819122 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fa70755-5735-495e-af6b-b0da222f1ae6-kube-api-access-nmwr6" (OuterVolumeSpecName: "kube-api-access-nmwr6") pod "5fa70755-5735-495e-af6b-b0da222f1ae6" (UID: "5fa70755-5735-495e-af6b-b0da222f1ae6"). InnerVolumeSpecName "kube-api-access-nmwr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.819170 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5fa70755-5735-495e-af6b-b0da222f1ae6" (UID: "5fa70755-5735-495e-af6b-b0da222f1ae6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.819597 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-config-data" (OuterVolumeSpecName: "config-data") pod "5fa70755-5735-495e-af6b-b0da222f1ae6" (UID: "5fa70755-5735-495e-af6b-b0da222f1ae6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.911439 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.911487 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.911499 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.911510 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.911531 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fa70755-5735-495e-af6b-b0da222f1ae6-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.911541 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmwr6\" (UniqueName: \"kubernetes.io/projected/5fa70755-5735-495e-af6b-b0da222f1ae6-kube-api-access-nmwr6\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:03 crc kubenswrapper[4779]: I0929 09:48:03.911554 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fa70755-5735-495e-af6b-b0da222f1ae6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.291940 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.291998 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.292637 4779 scope.go:117] "RemoveContainer" containerID="04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3" Sep 29 09:48:04 crc kubenswrapper[4779]: E0929 09:48:04.292858 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.716736 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.748226 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" path="/var/lib/kubelet/pods/c32af95a-c3b1-4cba-9c05-d5d787e3ec04/volumes" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.787352 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.801864 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.808418 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.810502 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.814940 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.815235 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.849009 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.931637 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46h2n\" (UniqueName: \"kubernetes.io/projected/dd0896b7-c098-42f8-bad0-78f58eefb79e-kube-api-access-46h2n\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.931792 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-scripts\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.931841 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-log-httpd\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.931862 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.931960 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-config-data\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.932029 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-combined-ca-bundle\") pod \"ceilometer-0\" 
(UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:04 crc kubenswrapper[4779]: I0929 09:48:04.932081 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-run-httpd\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.033896 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.033979 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-run-httpd\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.034040 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46h2n\" (UniqueName: \"kubernetes.io/projected/dd0896b7-c098-42f8-bad0-78f58eefb79e-kube-api-access-46h2n\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.034103 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-scripts\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.034130 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-log-httpd\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.034174 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.034206 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-config-data\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.034551 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-run-httpd\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.034579 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-log-httpd\") pod \"ceilometer-0\" (UID: 
\"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.039132 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.039153 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.041312 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-config-data\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.046418 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-scripts\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.058979 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46h2n\" (UniqueName: \"kubernetes.io/projected/dd0896b7-c098-42f8-bad0-78f58eefb79e-kube-api-access-46h2n\") pod \"ceilometer-0\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") " pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.145617 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:05 crc kubenswrapper[4779]: W0929 09:48:05.638462 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd0896b7_c098_42f8_bad0_78f58eefb79e.slice/crio-22e42c6a1aec0629bdfa4fccce8cf3a3b79e630f5c425431b917d47abee1f42e WatchSource:0}: Error finding container 22e42c6a1aec0629bdfa4fccce8cf3a3b79e630f5c425431b917d47abee1f42e: Status 404 returned error can't find the container with id 22e42c6a1aec0629bdfa4fccce8cf3a3b79e630f5c425431b917d47abee1f42e Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.647622 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:05 crc kubenswrapper[4779]: I0929 09:48:05.734259 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerStarted","Data":"22e42c6a1aec0629bdfa4fccce8cf3a3b79e630f5c425431b917d47abee1f42e"} Sep 29 09:48:06 crc kubenswrapper[4779]: I0929 09:48:06.725057 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fa70755-5735-495e-af6b-b0da222f1ae6" path="/var/lib/kubelet/pods/5fa70755-5735-495e-af6b-b0da222f1ae6/volumes" Sep 29 09:48:06 crc kubenswrapper[4779]: I0929 09:48:06.745849 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerStarted","Data":"2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09"} Sep 29 09:48:06 crc kubenswrapper[4779]: I0929 09:48:06.745887 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerStarted","Data":"18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d"} Sep 29 09:48:07 crc kubenswrapper[4779]: I0929 09:48:07.758465 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerStarted","Data":"371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171"} Sep 29 09:48:07 crc kubenswrapper[4779]: I0929 09:48:07.997690 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.285409 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-6336-account-create-9wth8"] Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.286485 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6336-account-create-9wth8" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.289221 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.313197 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6336-account-create-9wth8"] Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.400724 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsshk\" (UniqueName: \"kubernetes.io/projected/00efaa87-60ad-4589-93fc-560b8acf0d5a-kube-api-access-wsshk\") pod \"nova-api-6336-account-create-9wth8\" (UID: \"00efaa87-60ad-4589-93fc-560b8acf0d5a\") " pod="openstack/nova-api-6336-account-create-9wth8" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.485798 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-8747-account-create-44d8p"] Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.487235 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8747-account-create-44d8p" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.492205 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.498537 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-8747-account-create-44d8p"] Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.501916 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsshk\" (UniqueName: \"kubernetes.io/projected/00efaa87-60ad-4589-93fc-560b8acf0d5a-kube-api-access-wsshk\") pod \"nova-api-6336-account-create-9wth8\" (UID: \"00efaa87-60ad-4589-93fc-560b8acf0d5a\") " pod="openstack/nova-api-6336-account-create-9wth8" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.535152 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsshk\" (UniqueName: \"kubernetes.io/projected/00efaa87-60ad-4589-93fc-560b8acf0d5a-kube-api-access-wsshk\") pod \"nova-api-6336-account-create-9wth8\" (UID: \"00efaa87-60ad-4589-93fc-560b8acf0d5a\") " pod="openstack/nova-api-6336-account-create-9wth8" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.603592 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvbzp\" (UniqueName: \"kubernetes.io/projected/32ffe9bf-069f-4ba3-972b-abb6917e29b3-kube-api-access-hvbzp\") pod \"nova-cell0-8747-account-create-44d8p\" (UID: \"32ffe9bf-069f-4ba3-972b-abb6917e29b3\") " pod="openstack/nova-cell0-8747-account-create-44d8p" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.612342 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6336-account-create-9wth8" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.687107 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4152-account-create-7zn2j"] Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.689050 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4152-account-create-7zn2j" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.691335 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.696452 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4152-account-create-7zn2j"] Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.704994 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvbzp\" (UniqueName: \"kubernetes.io/projected/32ffe9bf-069f-4ba3-972b-abb6917e29b3-kube-api-access-hvbzp\") pod \"nova-cell0-8747-account-create-44d8p\" (UID: \"32ffe9bf-069f-4ba3-972b-abb6917e29b3\") " pod="openstack/nova-cell0-8747-account-create-44d8p" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.729442 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvbzp\" (UniqueName: \"kubernetes.io/projected/32ffe9bf-069f-4ba3-972b-abb6917e29b3-kube-api-access-hvbzp\") pod \"nova-cell0-8747-account-create-44d8p\" (UID: \"32ffe9bf-069f-4ba3-972b-abb6917e29b3\") " pod="openstack/nova-cell0-8747-account-create-44d8p" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.773176 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerStarted","Data":"90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277"} Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.773371 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.793272 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.377757996 podStartE2EDuration="4.793254047s" podCreationTimestamp="2025-09-29 09:48:04 +0000 UTC" firstStartedPulling="2025-09-29 09:48:05.640808802 +0000 UTC m=+1117.622132706" lastFinishedPulling="2025-09-29 09:48:08.056304843 +0000 UTC m=+1120.037628757" observedRunningTime="2025-09-29 09:48:08.789197631 +0000 UTC m=+1120.770521535" watchObservedRunningTime="2025-09-29 09:48:08.793254047 +0000 UTC m=+1120.774577951" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.802432 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-8747-account-create-44d8p" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.808708 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2kqz\" (UniqueName: \"kubernetes.io/projected/1c4225e6-9b2d-4c40-a892-baa6290c8ebb-kube-api-access-r2kqz\") pod \"nova-cell1-4152-account-create-7zn2j\" (UID: \"1c4225e6-9b2d-4c40-a892-baa6290c8ebb\") " pod="openstack/nova-cell1-4152-account-create-7zn2j" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.911180 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2kqz\" (UniqueName: \"kubernetes.io/projected/1c4225e6-9b2d-4c40-a892-baa6290c8ebb-kube-api-access-r2kqz\") pod \"nova-cell1-4152-account-create-7zn2j\" (UID: \"1c4225e6-9b2d-4c40-a892-baa6290c8ebb\") " pod="openstack/nova-cell1-4152-account-create-7zn2j" Sep 29 09:48:08 crc kubenswrapper[4779]: I0929 09:48:08.937399 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2kqz\" (UniqueName: \"kubernetes.io/projected/1c4225e6-9b2d-4c40-a892-baa6290c8ebb-kube-api-access-r2kqz\") pod \"nova-cell1-4152-account-create-7zn2j\" (UID: \"1c4225e6-9b2d-4c40-a892-baa6290c8ebb\") " pod="openstack/nova-cell1-4152-account-create-7zn2j" Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.016773 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4152-account-create-7zn2j" Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.075074 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6336-account-create-9wth8"] Sep 29 09:48:09 crc kubenswrapper[4779]: W0929 09:48:09.094203 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00efaa87_60ad_4589_93fc_560b8acf0d5a.slice/crio-ddee32ae4f0850b331c533ab01293ffc53873e3386adf93528d54208d7571031 WatchSource:0}: Error finding container ddee32ae4f0850b331c533ab01293ffc53873e3386adf93528d54208d7571031: Status 404 returned error can't find the container with id ddee32ae4f0850b331c533ab01293ffc53873e3386adf93528d54208d7571031 Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.225913 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-8747-account-create-44d8p"] Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.520939 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4152-account-create-7zn2j"] Sep 29 09:48:09 crc kubenswrapper[4779]: W0929 09:48:09.546293 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c4225e6_9b2d_4c40_a892_baa6290c8ebb.slice/crio-a5be457c75289022d6dd5d548e9ad79be5a0343f5bd523b61814619bf992bf69 WatchSource:0}: Error finding container a5be457c75289022d6dd5d548e9ad79be5a0343f5bd523b61814619bf992bf69: Status 404 returned error can't find the container with id a5be457c75289022d6dd5d548e9ad79be5a0343f5bd523b61814619bf992bf69 Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.786522 4779 generic.go:334] "Generic (PLEG): container finished" podID="00efaa87-60ad-4589-93fc-560b8acf0d5a" containerID="227f8ca94312ba4ef34cc3bdddbd28879e4df36ad6a813a9b6d15ee0c9056416" exitCode=0 Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.786744 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-6336-account-create-9wth8" event={"ID":"00efaa87-60ad-4589-93fc-560b8acf0d5a","Type":"ContainerDied","Data":"227f8ca94312ba4ef34cc3bdddbd28879e4df36ad6a813a9b6d15ee0c9056416"} Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.786768 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6336-account-create-9wth8" event={"ID":"00efaa87-60ad-4589-93fc-560b8acf0d5a","Type":"ContainerStarted","Data":"ddee32ae4f0850b331c533ab01293ffc53873e3386adf93528d54208d7571031"} Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.803040 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4152-account-create-7zn2j" event={"ID":"1c4225e6-9b2d-4c40-a892-baa6290c8ebb","Type":"ContainerStarted","Data":"a5be457c75289022d6dd5d548e9ad79be5a0343f5bd523b61814619bf992bf69"} Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.806457 4779 generic.go:334] "Generic (PLEG): container finished" podID="32ffe9bf-069f-4ba3-972b-abb6917e29b3" containerID="231d94dc2be4321a64c5bbef5eacb99b78cbfdfa3a05541dd1f8966f6833b212" exitCode=0 Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.806512 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8747-account-create-44d8p" event={"ID":"32ffe9bf-069f-4ba3-972b-abb6917e29b3","Type":"ContainerDied","Data":"231d94dc2be4321a64c5bbef5eacb99b78cbfdfa3a05541dd1f8966f6833b212"} Sep 29 09:48:09 crc kubenswrapper[4779]: I0929 09:48:09.806540 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8747-account-create-44d8p" event={"ID":"32ffe9bf-069f-4ba3-972b-abb6917e29b3","Type":"ContainerStarted","Data":"4c2da76501b6819bfe39a26ac6e9dd195cf5bf1dfd634d0779dd194fd3811b55"} Sep 29 09:48:10 crc kubenswrapper[4779]: I0929 09:48:10.818886 4779 generic.go:334] "Generic (PLEG): container finished" podID="1c4225e6-9b2d-4c40-a892-baa6290c8ebb" containerID="a6b35bca0bba3807d404b21d87521a4db912cccde982b52f8077fa8759559c91" exitCode=0 Sep 29 09:48:10 crc kubenswrapper[4779]: I0929 09:48:10.819726 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4152-account-create-7zn2j" event={"ID":"1c4225e6-9b2d-4c40-a892-baa6290c8ebb","Type":"ContainerDied","Data":"a6b35bca0bba3807d404b21d87521a4db912cccde982b52f8077fa8759559c91"} Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.165377 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-68487ccd79-zvm96" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.236119 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7fb8cfdfb-ff7cr"] Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.236367 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7fb8cfdfb-ff7cr" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerName="neutron-api" containerID="cri-o://87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de" gracePeriod=30 Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.237314 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7fb8cfdfb-ff7cr" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerName="neutron-httpd" containerID="cri-o://eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934" gracePeriod=30 Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.354192 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6336-account-create-9wth8" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.465953 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsshk\" (UniqueName: \"kubernetes.io/projected/00efaa87-60ad-4589-93fc-560b8acf0d5a-kube-api-access-wsshk\") pod \"00efaa87-60ad-4589-93fc-560b8acf0d5a\" (UID: \"00efaa87-60ad-4589-93fc-560b8acf0d5a\") " Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.471991 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00efaa87-60ad-4589-93fc-560b8acf0d5a-kube-api-access-wsshk" (OuterVolumeSpecName: "kube-api-access-wsshk") pod "00efaa87-60ad-4589-93fc-560b8acf0d5a" (UID: "00efaa87-60ad-4589-93fc-560b8acf0d5a"). InnerVolumeSpecName "kube-api-access-wsshk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.516163 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-8747-account-create-44d8p" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.567288 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvbzp\" (UniqueName: \"kubernetes.io/projected/32ffe9bf-069f-4ba3-972b-abb6917e29b3-kube-api-access-hvbzp\") pod \"32ffe9bf-069f-4ba3-972b-abb6917e29b3\" (UID: \"32ffe9bf-069f-4ba3-972b-abb6917e29b3\") " Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.567666 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsshk\" (UniqueName: \"kubernetes.io/projected/00efaa87-60ad-4589-93fc-560b8acf0d5a-kube-api-access-wsshk\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.571298 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32ffe9bf-069f-4ba3-972b-abb6917e29b3-kube-api-access-hvbzp" (OuterVolumeSpecName: "kube-api-access-hvbzp") pod "32ffe9bf-069f-4ba3-972b-abb6917e29b3" (UID: "32ffe9bf-069f-4ba3-972b-abb6917e29b3"). InnerVolumeSpecName "kube-api-access-hvbzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.669211 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvbzp\" (UniqueName: \"kubernetes.io/projected/32ffe9bf-069f-4ba3-972b-abb6917e29b3-kube-api-access-hvbzp\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.833688 4779 generic.go:334] "Generic (PLEG): container finished" podID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerID="eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934" exitCode=0 Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.833773 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8cfdfb-ff7cr" event={"ID":"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82","Type":"ContainerDied","Data":"eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934"} Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.835507 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-8747-account-create-44d8p" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.835522 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-8747-account-create-44d8p" event={"ID":"32ffe9bf-069f-4ba3-972b-abb6917e29b3","Type":"ContainerDied","Data":"4c2da76501b6819bfe39a26ac6e9dd195cf5bf1dfd634d0779dd194fd3811b55"} Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.835567 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c2da76501b6819bfe39a26ac6e9dd195cf5bf1dfd634d0779dd194fd3811b55" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.836977 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6336-account-create-9wth8" event={"ID":"00efaa87-60ad-4589-93fc-560b8acf0d5a","Type":"ContainerDied","Data":"ddee32ae4f0850b331c533ab01293ffc53873e3386adf93528d54208d7571031"} Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.837025 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddee32ae4f0850b331c533ab01293ffc53873e3386adf93528d54208d7571031" Sep 29 09:48:11 crc kubenswrapper[4779]: I0929 09:48:11.837055 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6336-account-create-9wth8" Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.167659 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4152-account-create-7zn2j" Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.279321 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2kqz\" (UniqueName: \"kubernetes.io/projected/1c4225e6-9b2d-4c40-a892-baa6290c8ebb-kube-api-access-r2kqz\") pod \"1c4225e6-9b2d-4c40-a892-baa6290c8ebb\" (UID: \"1c4225e6-9b2d-4c40-a892-baa6290c8ebb\") " Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.303185 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c4225e6-9b2d-4c40-a892-baa6290c8ebb-kube-api-access-r2kqz" (OuterVolumeSpecName: "kube-api-access-r2kqz") pod "1c4225e6-9b2d-4c40-a892-baa6290c8ebb" (UID: "1c4225e6-9b2d-4c40-a892-baa6290c8ebb"). InnerVolumeSpecName "kube-api-access-r2kqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.380976 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2kqz\" (UniqueName: \"kubernetes.io/projected/1c4225e6-9b2d-4c40-a892-baa6290c8ebb-kube-api-access-r2kqz\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.846375 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4152-account-create-7zn2j" event={"ID":"1c4225e6-9b2d-4c40-a892-baa6290c8ebb","Type":"ContainerDied","Data":"a5be457c75289022d6dd5d548e9ad79be5a0343f5bd523b61814619bf992bf69"} Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.846421 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5be457c75289022d6dd5d548e9ad79be5a0343f5bd523b61814619bf992bf69" Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.846428 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4152-account-create-7zn2j" Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.988047 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.988324 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="ceilometer-central-agent" containerID="cri-o://18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d" gracePeriod=30 Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.988388 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="ceilometer-notification-agent" containerID="cri-o://2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09" gracePeriod=30 Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.988420 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="proxy-httpd" containerID="cri-o://90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277" gracePeriod=30 Sep 29 09:48:12 crc kubenswrapper[4779]: I0929 09:48:12.988377 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="sg-core" containerID="cri-o://371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171" gracePeriod=30 Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.826553 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n7dpw"] Sep 29 09:48:13 crc kubenswrapper[4779]: E0929 09:48:13.827207 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ffe9bf-069f-4ba3-972b-abb6917e29b3" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.827221 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ffe9bf-069f-4ba3-972b-abb6917e29b3" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: E0929 09:48:13.827255 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00efaa87-60ad-4589-93fc-560b8acf0d5a" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.827262 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="00efaa87-60ad-4589-93fc-560b8acf0d5a" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: E0929 09:48:13.827275 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c4225e6-9b2d-4c40-a892-baa6290c8ebb" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.827283 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c4225e6-9b2d-4c40-a892-baa6290c8ebb" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.827458 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="00efaa87-60ad-4589-93fc-560b8acf0d5a" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.827470 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c4225e6-9b2d-4c40-a892-baa6290c8ebb" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.827484 4779 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="32ffe9bf-069f-4ba3-972b-abb6917e29b3" containerName="mariadb-account-create" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.828139 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.831086 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.831096 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-8dh5h" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.831436 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.834485 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n7dpw"] Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.858585 4779 generic.go:334] "Generic (PLEG): container finished" podID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerID="90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277" exitCode=0 Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.858612 4779 generic.go:334] "Generic (PLEG): container finished" podID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerID="371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171" exitCode=2 Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.858621 4779 generic.go:334] "Generic (PLEG): container finished" podID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerID="2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09" exitCode=0 Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.858638 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerDied","Data":"90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277"} Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.858672 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerDied","Data":"371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171"} Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.858685 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerDied","Data":"2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09"} Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.908021 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-scripts\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.908088 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.908136 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-config-data\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:13 crc kubenswrapper[4779]: I0929 09:48:13.908207 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kwr6\" (UniqueName: \"kubernetes.io/projected/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-kube-api-access-4kwr6\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.010275 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kwr6\" (UniqueName: \"kubernetes.io/projected/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-kube-api-access-4kwr6\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.010398 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-scripts\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.011362 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.011449 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-config-data\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.015959 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.016034 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-config-data\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.022318 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-scripts\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc 
kubenswrapper[4779]: I0929 09:48:14.026149 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kwr6\" (UniqueName: \"kubernetes.io/projected/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-kube-api-access-4kwr6\") pod \"nova-cell0-conductor-db-sync-n7dpw\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.144933 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.292468 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.292828 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.293706 4779 scope.go:117] "RemoveContainer" containerID="04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3" Sep 29 09:48:14 crc kubenswrapper[4779]: E0929 09:48:14.293985 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.644158 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n7dpw"] Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.878134 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.878675 4779 generic.go:334] "Generic (PLEG): container finished" podID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerID="18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d" exitCode=0
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.878767 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerDied","Data":"18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d"}
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.878796 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd0896b7-c098-42f8-bad0-78f58eefb79e","Type":"ContainerDied","Data":"22e42c6a1aec0629bdfa4fccce8cf3a3b79e630f5c425431b917d47abee1f42e"}
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.878812 4779 scope.go:117] "RemoveContainer" containerID="90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277"
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.881463 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" event={"ID":"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0","Type":"ContainerStarted","Data":"19d712a00e48d22bcdc26064fea334d7b189989421aca98e2a28f5e33ed0d3ea"}
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.929131 4779 scope.go:117] "RemoveContainer" containerID="371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171"
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.938812 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-scripts\") pod \"dd0896b7-c098-42f8-bad0-78f58eefb79e\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") "
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.938858 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-config-data\") pod \"dd0896b7-c098-42f8-bad0-78f58eefb79e\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") "
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.939343 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-log-httpd\") pod \"dd0896b7-c098-42f8-bad0-78f58eefb79e\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") "
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.939412 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-run-httpd\") pod \"dd0896b7-c098-42f8-bad0-78f58eefb79e\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") "
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.939470 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-combined-ca-bundle\") pod \"dd0896b7-c098-42f8-bad0-78f58eefb79e\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") "
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.939521 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-sg-core-conf-yaml\") pod \"dd0896b7-c098-42f8-bad0-78f58eefb79e\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") "
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.939547 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46h2n\" (UniqueName: \"kubernetes.io/projected/dd0896b7-c098-42f8-bad0-78f58eefb79e-kube-api-access-46h2n\") pod \"dd0896b7-c098-42f8-bad0-78f58eefb79e\" (UID: \"dd0896b7-c098-42f8-bad0-78f58eefb79e\") "
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.939863 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dd0896b7-c098-42f8-bad0-78f58eefb79e" (UID: "dd0896b7-c098-42f8-bad0-78f58eefb79e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.940297 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dd0896b7-c098-42f8-bad0-78f58eefb79e" (UID: "dd0896b7-c098-42f8-bad0-78f58eefb79e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.940737 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.940757 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd0896b7-c098-42f8-bad0-78f58eefb79e-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.944929 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd0896b7-c098-42f8-bad0-78f58eefb79e-kube-api-access-46h2n" (OuterVolumeSpecName: "kube-api-access-46h2n") pod "dd0896b7-c098-42f8-bad0-78f58eefb79e" (UID: "dd0896b7-c098-42f8-bad0-78f58eefb79e"). InnerVolumeSpecName "kube-api-access-46h2n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:48:14 crc kubenswrapper[4779]: I0929 09:48:14.948513 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-scripts" (OuterVolumeSpecName: "scripts") pod "dd0896b7-c098-42f8-bad0-78f58eefb79e" (UID: "dd0896b7-c098-42f8-bad0-78f58eefb79e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.002175 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dd0896b7-c098-42f8-bad0-78f58eefb79e" (UID: "dd0896b7-c098-42f8-bad0-78f58eefb79e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.042746 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.042775 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.042786 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46h2n\" (UniqueName: \"kubernetes.io/projected/dd0896b7-c098-42f8-bad0-78f58eefb79e-kube-api-access-46h2n\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.045758 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd0896b7-c098-42f8-bad0-78f58eefb79e" (UID: "dd0896b7-c098-42f8-bad0-78f58eefb79e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.076994 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-config-data" (OuterVolumeSpecName: "config-data") pod "dd0896b7-c098-42f8-bad0-78f58eefb79e" (UID: "dd0896b7-c098-42f8-bad0-78f58eefb79e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.077240 4779 scope.go:117] "RemoveContainer" containerID="2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.118645 4779 scope.go:117] "RemoveContainer" containerID="18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.144993 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.145243 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd0896b7-c098-42f8-bad0-78f58eefb79e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.164098 4779 scope.go:117] "RemoveContainer" containerID="90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277" Sep 29 09:48:15 crc kubenswrapper[4779]: E0929 09:48:15.164652 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277\": container with ID starting with 90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277 not found: ID does not exist" containerID="90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.164700 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277"} err="failed to get container status 
\"90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277\": rpc error: code = NotFound desc = could not find container \"90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277\": container with ID starting with 90cda883268c180dad806a29cc500ccc707a72d684119dc8afeafd96a2e35277 not found: ID does not exist" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.164728 4779 scope.go:117] "RemoveContainer" containerID="371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171" Sep 29 09:48:15 crc kubenswrapper[4779]: E0929 09:48:15.165048 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171\": container with ID starting with 371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171 not found: ID does not exist" containerID="371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.165101 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171"} err="failed to get container status \"371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171\": rpc error: code = NotFound desc = could not find container \"371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171\": container with ID starting with 371cff0619dea199c652ff9f2978b60193d463885afce0e0dc6445b4c578a171 not found: ID does not exist" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.165135 4779 scope.go:117] "RemoveContainer" containerID="2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09" Sep 29 09:48:15 crc kubenswrapper[4779]: E0929 09:48:15.167952 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09\": container with ID starting with 2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09 not found: ID does not exist" containerID="2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.168027 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09"} err="failed to get container status \"2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09\": rpc error: code = NotFound desc = could not find container \"2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09\": container with ID starting with 2164e7bbc210d5fb99af1192bbdb956984a5150c7df52de4fedce3bc5b7b7c09 not found: ID does not exist" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.168090 4779 scope.go:117] "RemoveContainer" containerID="18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d" Sep 29 09:48:15 crc kubenswrapper[4779]: E0929 09:48:15.169214 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d\": container with ID starting with 18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d not found: ID does not exist" containerID="18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.169332 4779 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d"} err="failed to get container status \"18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d\": rpc error: code = NotFound desc = could not find container \"18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d\": container with ID starting with 18b11631f9c05eab44444c644864d047854414eb4e040d4211d5a9892193246d not found: ID does not exist" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.897312 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.939007 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.951213 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.964399 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:15 crc kubenswrapper[4779]: E0929 09:48:15.964896 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="ceilometer-notification-agent" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.964937 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="ceilometer-notification-agent" Sep 29 09:48:15 crc kubenswrapper[4779]: E0929 09:48:15.964955 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="proxy-httpd" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.964963 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="proxy-httpd" Sep 29 09:48:15 crc kubenswrapper[4779]: E0929 09:48:15.964983 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="ceilometer-central-agent" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.964990 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="ceilometer-central-agent" Sep 29 09:48:15 crc kubenswrapper[4779]: E0929 09:48:15.965008 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="sg-core" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.965014 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="sg-core" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.965227 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="ceilometer-central-agent" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.965253 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="sg-core" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.965289 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="proxy-httpd" Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.965300 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" containerName="ceilometer-notification-agent" Sep 29 
Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.967393 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.972654 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.972920 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 09:48:15 crc kubenswrapper[4779]: I0929 09:48:15.995097 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.059560 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-run-httpd\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.059624 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-config-data\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.059675 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt828\" (UniqueName: \"kubernetes.io/projected/0cfd9438-efa0-458e-b810-942fec5bb8a5-kube-api-access-qt828\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.059710 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.059754 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-log-httpd\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.059784 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-scripts\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.059808 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.161662 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-run-httpd\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.162151 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-config-data\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.162187 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt828\" (UniqueName: \"kubernetes.io/projected/0cfd9438-efa0-458e-b810-942fec5bb8a5-kube-api-access-qt828\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.162224 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.162271 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-log-httpd\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.162309 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-scripts\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.162337 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.163993 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-log-httpd\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.163685 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-run-httpd\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.168349 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.169033 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-config-data\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.174195 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-scripts\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.182160 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt828\" (UniqueName: \"kubernetes.io/projected/0cfd9438-efa0-458e-b810-942fec5bb8a5-kube-api-access-qt828\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.183419 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.373457 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.470471 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7fb8cfdfb-ff7cr"
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.571834 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-ovndb-tls-certs\") pod \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") "
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.572291 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-combined-ca-bundle\") pod \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") "
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.572372 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-httpd-config\") pod \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") "
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.572447 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztcqm\" (UniqueName: \"kubernetes.io/projected/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-kube-api-access-ztcqm\") pod \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") "
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.572475 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-config\") pod \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\" (UID: \"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82\") "
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.577439 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-kube-api-access-ztcqm" (OuterVolumeSpecName: "kube-api-access-ztcqm") pod "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" (UID: "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82"). InnerVolumeSpecName "kube-api-access-ztcqm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.577543 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" (UID: "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.630055 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" (UID: "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.652574 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.657238 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-config" (OuterVolumeSpecName: "config") pod "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" (UID: "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.664239 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" (UID: "c1f1c031-af73-4248-b0ee-6c9a8e6c2a82"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.674865 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.674897 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztcqm\" (UniqueName: \"kubernetes.io/projected/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-kube-api-access-ztcqm\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.674923 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.674931 4779 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.674939 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.724167 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd0896b7-c098-42f8-bad0-78f58eefb79e" path="/var/lib/kubelet/pods/dd0896b7-c098-42f8-bad0-78f58eefb79e/volumes" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.796879 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:16 crc kubenswrapper[4779]: W0929 09:48:16.807233 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0cfd9438_efa0_458e_b810_942fec5bb8a5.slice/crio-8d6517e88a091c3756e7a2d21a1397c1c4906ca7d8f2945c644b614c3415a017 WatchSource:0}: Error finding container 8d6517e88a091c3756e7a2d21a1397c1c4906ca7d8f2945c644b614c3415a017: Status 404 returned error can't find the container with id 8d6517e88a091c3756e7a2d21a1397c1c4906ca7d8f2945c644b614c3415a017 Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.909832 4779 generic.go:334] "Generic (PLEG): container finished" podID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerID="87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de" exitCode=0 Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.909926 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8cfdfb-ff7cr" event={"ID":"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82","Type":"ContainerDied","Data":"87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de"} Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.909973 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8cfdfb-ff7cr" event={"ID":"c1f1c031-af73-4248-b0ee-6c9a8e6c2a82","Type":"ContainerDied","Data":"b3a4c1e024e4f8d359230221762e71495357df9efd56905b1a6f4405d499dcae"} Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.909993 4779 scope.go:117] "RemoveContainer" containerID="eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.910131 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7fb8cfdfb-ff7cr" Sep 29 09:48:16 crc kubenswrapper[4779]: I0929 09:48:16.913194 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerStarted","Data":"8d6517e88a091c3756e7a2d21a1397c1c4906ca7d8f2945c644b614c3415a017"} Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.002920 4779 scope.go:117] "RemoveContainer" containerID="87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de" Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.031276 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7fb8cfdfb-ff7cr"] Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.039192 4779 scope.go:117] "RemoveContainer" containerID="eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934" Sep 29 09:48:17 crc kubenswrapper[4779]: E0929 09:48:17.039633 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934\": container with ID starting with eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934 not found: ID does not exist" containerID="eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934" Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.039669 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934"} err="failed to get container status \"eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934\": rpc error: code = NotFound desc = could not find container \"eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934\": container with ID starting with eaaf7c3292add81067ab0de1f5e10235575d9488e8459c9f440fd91e212eb934 not found: ID does not exist" Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.039688 4779 scope.go:117] "RemoveContainer" containerID="87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de" Sep 29 09:48:17 crc kubenswrapper[4779]: E0929 09:48:17.040200 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de\": container with ID starting with 87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de not found: ID does not exist" containerID="87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de" Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.040252 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de"} err="failed to get container status \"87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de\": rpc error: code = NotFound desc = could not find container \"87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de\": container with ID starting with 87be15ca283a6fc588a7296ec4ea350af474a0d635f62c88358442ff580a39de not found: ID does not exist" Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.044342 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7fb8cfdfb-ff7cr"] Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.936173 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerStarted","Data":"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972"} Sep 29 09:48:17 crc kubenswrapper[4779]: I0929 09:48:17.936449 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerStarted","Data":"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809"} Sep 29 09:48:18 crc kubenswrapper[4779]: I0929 09:48:18.728797 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" path="/var/lib/kubelet/pods/c1f1c031-af73-4248-b0ee-6c9a8e6c2a82/volumes" Sep 29 09:48:18 crc kubenswrapper[4779]: I0929 09:48:18.951017 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerStarted","Data":"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa"} Sep 29 09:48:23 crc kubenswrapper[4779]: I0929 09:48:23.997567 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" event={"ID":"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0","Type":"ContainerStarted","Data":"c8f3399cd4b08d51e0415f62410d0e9b9bd65963ad041a47277217410d338f13"} Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.004243 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerStarted","Data":"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c"} Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.004475 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="ceilometer-central-agent" containerID="cri-o://bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809" gracePeriod=30 Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.004495 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.004514 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="proxy-httpd" containerID="cri-o://acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c" gracePeriod=30 Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.004511 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="sg-core" containerID="cri-o://4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa" gracePeriod=30 Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.004787 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="ceilometer-notification-agent" containerID="cri-o://0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972" gracePeriod=30 Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.022079 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" podStartSLOduration=2.138896873 podStartE2EDuration="11.021899695s" podCreationTimestamp="2025-09-29 09:48:13 +0000 UTC" firstStartedPulling="2025-09-29 09:48:14.670054609 +0000 UTC 
Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.022079 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" podStartSLOduration=2.138896873 podStartE2EDuration="11.021899695s" podCreationTimestamp="2025-09-29 09:48:13 +0000 UTC" firstStartedPulling="2025-09-29 09:48:14.670054609 +0000 UTC m=+1126.651378513" lastFinishedPulling="2025-09-29 09:48:23.553057401 +0000 UTC m=+1135.534381335" observedRunningTime="2025-09-29 09:48:24.018816177 +0000 UTC m=+1136.000140101" watchObservedRunningTime="2025-09-29 09:48:24.021899695 +0000 UTC m=+1136.003223639"
Sep 29 09:48:24 crc kubenswrapper[4779]: I0929 09:48:24.057231 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.36170039 podStartE2EDuration="9.057212036s" podCreationTimestamp="2025-09-29 09:48:15 +0000 UTC" firstStartedPulling="2025-09-29 09:48:16.809988534 +0000 UTC m=+1128.791312438" lastFinishedPulling="2025-09-29 09:48:23.50550016 +0000 UTC m=+1135.486824084" observedRunningTime="2025-09-29 09:48:24.047015174 +0000 UTC m=+1136.028339068" watchObservedRunningTime="2025-09-29 09:48:24.057212036 +0000 UTC m=+1136.038535940"
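The two "Observed pod startup duration" entries encode an easy-to-check relation: podStartSLOduration appears to be the end-to-end duration minus the image-pull window. For the nova job, 11.021899695s - (09:48:23.553057401 - 09:48:14.670054609) ≈ 2.1388969s, matching the logged 2.138896873 to within tens of nanoseconds (clock-source jitter). Recomputing it in Go from the logged timestamps; the subtraction rule is an inference from these numbers, not a quote of the tracker's source:

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	// Same textual format the kubelet uses for these timestamps.
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-09-29 09:48:13 +0000 UTC")
	firstPull := mustParse("2025-09-29 09:48:14.670054609 +0000 UTC")
	lastPull := mustParse("2025-09-29 09:48:23.553057401 +0000 UTC")
	running := mustParse("2025-09-29 09:48:24.021899695 +0000 UTC") // watchObservedRunningTime

	e2e := running.Sub(created)
	slo := e2e - lastPull.Sub(firstPull) // inferred: E2E minus image-pull window
	fmt.Println("podStartE2EDuration:", e2e) // 11.021899695s
	fmt.Println("podStartSLOduration:", slo) // ~2.138896903s vs logged 2.138896873
}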
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.014880 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerDied","Data":"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c"} Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.014916 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerDied","Data":"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa"} Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.014928 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerDied","Data":"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972"} Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.014937 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerDied","Data":"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809"} Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.014946 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0cfd9438-efa0-458e-b810-942fec5bb8a5","Type":"ContainerDied","Data":"8d6517e88a091c3756e7a2d21a1397c1c4906ca7d8f2945c644b614c3415a017"} Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.014960 4779 scope.go:117] "RemoveContainer" containerID="acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.030501 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-scripts\") pod \"0cfd9438-efa0-458e-b810-942fec5bb8a5\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.030556 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-config-data\") pod \"0cfd9438-efa0-458e-b810-942fec5bb8a5\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.030713 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-sg-core-conf-yaml\") pod \"0cfd9438-efa0-458e-b810-942fec5bb8a5\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.030748 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-combined-ca-bundle\") pod \"0cfd9438-efa0-458e-b810-942fec5bb8a5\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.030789 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-run-httpd\") pod \"0cfd9438-efa0-458e-b810-942fec5bb8a5\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.030849 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-log-httpd\") pod \"0cfd9438-efa0-458e-b810-942fec5bb8a5\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.031152 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0cfd9438-efa0-458e-b810-942fec5bb8a5" (UID: "0cfd9438-efa0-458e-b810-942fec5bb8a5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.031321 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0cfd9438-efa0-458e-b810-942fec5bb8a5" (UID: "0cfd9438-efa0-458e-b810-942fec5bb8a5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.031427 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt828\" (UniqueName: \"kubernetes.io/projected/0cfd9438-efa0-458e-b810-942fec5bb8a5-kube-api-access-qt828\") pod \"0cfd9438-efa0-458e-b810-942fec5bb8a5\" (UID: \"0cfd9438-efa0-458e-b810-942fec5bb8a5\") " Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.032407 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.032429 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0cfd9438-efa0-458e-b810-942fec5bb8a5-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.036893 4779 scope.go:117] "RemoveContainer" containerID="4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.039608 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cfd9438-efa0-458e-b810-942fec5bb8a5-kube-api-access-qt828" (OuterVolumeSpecName: "kube-api-access-qt828") pod "0cfd9438-efa0-458e-b810-942fec5bb8a5" (UID: "0cfd9438-efa0-458e-b810-942fec5bb8a5"). InnerVolumeSpecName "kube-api-access-qt828". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.055231 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-scripts" (OuterVolumeSpecName: "scripts") pod "0cfd9438-efa0-458e-b810-942fec5bb8a5" (UID: "0cfd9438-efa0-458e-b810-942fec5bb8a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.057756 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0cfd9438-efa0-458e-b810-942fec5bb8a5" (UID: "0cfd9438-efa0-458e-b810-942fec5bb8a5"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.120347 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0cfd9438-efa0-458e-b810-942fec5bb8a5" (UID: "0cfd9438-efa0-458e-b810-942fec5bb8a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.135827 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.135868 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.135881 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt828\" (UniqueName: \"kubernetes.io/projected/0cfd9438-efa0-458e-b810-942fec5bb8a5-kube-api-access-qt828\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.135897 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.149071 4779 scope.go:117] "RemoveContainer" containerID="0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.161026 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-config-data" (OuterVolumeSpecName: "config-data") pod "0cfd9438-efa0-458e-b810-942fec5bb8a5" (UID: "0cfd9438-efa0-458e-b810-942fec5bb8a5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.168064 4779 scope.go:117] "RemoveContainer" containerID="bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.189518 4779 scope.go:117] "RemoveContainer" containerID="acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.189962 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": container with ID starting with acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c not found: ID does not exist" containerID="acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.189998 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c"} err="failed to get container status \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": rpc error: code = NotFound desc = could not find container \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": container with ID starting with acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.190020 4779 scope.go:117] "RemoveContainer" containerID="4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.190254 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": container with ID starting with 4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa not found: ID does not exist" containerID="4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.190317 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa"} err="failed to get container status \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": rpc error: code = NotFound desc = could not find container \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": container with ID starting with 4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.190333 4779 scope.go:117] "RemoveContainer" containerID="0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.190744 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": container with ID starting with 0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972 not found: ID does not exist" containerID="0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.190784 4779 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972"} err="failed to get container status \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": rpc error: code = NotFound desc = could not find container \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": container with ID starting with 0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972 not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.190799 4779 scope.go:117] "RemoveContainer" containerID="bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.191062 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": container with ID starting with bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809 not found: ID does not exist" containerID="bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.191115 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809"} err="failed to get container status \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": rpc error: code = NotFound desc = could not find container \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": container with ID starting with bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809 not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.191130 4779 scope.go:117] "RemoveContainer" containerID="acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.191613 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c"} err="failed to get container status \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": rpc error: code = NotFound desc = could not find container \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": container with ID starting with acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.191662 4779 scope.go:117] "RemoveContainer" containerID="4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.191926 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa"} err="failed to get container status \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": rpc error: code = NotFound desc = could not find container \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": container with ID starting with 4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.191949 4779 scope.go:117] "RemoveContainer" containerID="0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.192190 4779 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972"} err="failed to get container status \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": rpc error: code = NotFound desc = could not find container \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": container with ID starting with 0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972 not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.192206 4779 scope.go:117] "RemoveContainer" containerID="bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.192368 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809"} err="failed to get container status \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": rpc error: code = NotFound desc = could not find container \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": container with ID starting with bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809 not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.192391 4779 scope.go:117] "RemoveContainer" containerID="acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.192588 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c"} err="failed to get container status \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": rpc error: code = NotFound desc = could not find container \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": container with ID starting with acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.192606 4779 scope.go:117] "RemoveContainer" containerID="4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.192767 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa"} err="failed to get container status \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": rpc error: code = NotFound desc = could not find container \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": container with ID starting with 4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.192783 4779 scope.go:117] "RemoveContainer" containerID="0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.193187 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972"} err="failed to get container status \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": rpc error: code = NotFound desc = could not find container \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": container with ID starting with 0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972 not found: ID does not exist" Sep 
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.193434 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809"} err="failed to get container status \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": rpc error: code = NotFound desc = could not find container \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": container with ID starting with bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809 not found: ID does not exist"
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.193453 4779 scope.go:117] "RemoveContainer" containerID="acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c"
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.193653 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c"} err="failed to get container status \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": rpc error: code = NotFound desc = could not find container \"acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c\": container with ID starting with acef97b2d237d1ce25c808480d8766d4c235ffac7f263268643527485cc6cb2c not found: ID does not exist"
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.193670 4779 scope.go:117] "RemoveContainer" containerID="4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa"
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.193886 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa"} err="failed to get container status \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": rpc error: code = NotFound desc = could not find container \"4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa\": container with ID starting with 4fe0aee225e06750e48ccc7953ea69b9581f3a43cba9f6979b2c33a49edfa4fa not found: ID does not exist"
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.193929 4779 scope.go:117] "RemoveContainer" containerID="0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972"
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.194118 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972"} err="failed to get container status \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": rpc error: code = NotFound desc = could not find container \"0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972\": container with ID starting with 0a92e50d2300b20b41e802265058451929fcd35e6795853a8e3ed82d24e69972 not found: ID does not exist"
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.194140 4779 scope.go:117] "RemoveContainer" containerID="bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809"
Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.194296 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809"} err="failed to get container status \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": rpc error: code = NotFound desc = could not find container \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": container with ID starting with bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809 not found: ID does not exist"
\"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": rpc error: code = NotFound desc = could not find container \"bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809\": container with ID starting with bc06db5fb3dbe913cc986da56e3acba863bec906e8dfbc7034c9a008f6bc1809 not found: ID does not exist" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.237878 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cfd9438-efa0-458e-b810-942fec5bb8a5-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.379507 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.389348 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405107 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.405419 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="proxy-httpd" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405435 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="proxy-httpd" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.405449 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerName="neutron-api" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405456 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerName="neutron-api" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.405470 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="sg-core" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405476 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="sg-core" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.405491 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="ceilometer-central-agent" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405497 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="ceilometer-central-agent" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.405515 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerName="neutron-httpd" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405520 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerName="neutron-httpd" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.405532 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="ceilometer-notification-agent" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405537 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="ceilometer-notification-agent" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405688 4779 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="ceilometer-notification-agent" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405705 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="proxy-httpd" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405719 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerName="neutron-api" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405730 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1f1c031-af73-4248-b0ee-6c9a8e6c2a82" containerName="neutron-httpd" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405739 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="sg-core" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.405752 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" containerName="ceilometer-central-agent" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.407739 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.409734 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.413223 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.425212 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.544249 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.544422 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6dpk\" (UniqueName: \"kubernetes.io/projected/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-kube-api-access-q6dpk\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.544457 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-run-httpd\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.544586 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-log-httpd\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.544621 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-config-data\") pod \"ceilometer-0\" (UID: 
\"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.544661 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-scripts\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.544696 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.646140 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.646819 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6dpk\" (UniqueName: \"kubernetes.io/projected/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-kube-api-access-q6dpk\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.647016 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-run-httpd\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.647319 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-log-httpd\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.647475 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-config-data\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.647588 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-scripts\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.647686 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.647604 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-run-httpd\") pod \"ceilometer-0\" (UID: 
\"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.647837 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-log-httpd\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.652088 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-scripts\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.652336 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-config-data\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.653580 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.663433 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.664434 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6dpk\" (UniqueName: \"kubernetes.io/projected/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-kube-api-access-q6dpk\") pod \"ceilometer-0\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " pod="openstack/ceilometer-0" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.713524 4779 scope.go:117] "RemoveContainer" containerID="04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3" Sep 29 09:48:25 crc kubenswrapper[4779]: E0929 09:48:25.713809 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:48:25 crc kubenswrapper[4779]: I0929 09:48:25.724997 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:48:26 crc kubenswrapper[4779]: I0929 09:48:26.245794 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:48:26 crc kubenswrapper[4779]: W0929 09:48:26.256484 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e9c6ab7_50a1_49d3_a6df_71646c0e9a89.slice/crio-26c60eb886a81886a03f5eae5fe86216473255dd2d8924539fc395dc319c2a33 WatchSource:0}: Error finding container 26c60eb886a81886a03f5eae5fe86216473255dd2d8924539fc395dc319c2a33: Status 404 returned error can't find the container with id 26c60eb886a81886a03f5eae5fe86216473255dd2d8924539fc395dc319c2a33 Sep 29 09:48:26 crc kubenswrapper[4779]: I0929 09:48:26.732575 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cfd9438-efa0-458e-b810-942fec5bb8a5" path="/var/lib/kubelet/pods/0cfd9438-efa0-458e-b810-942fec5bb8a5/volumes" Sep 29 09:48:27 crc kubenswrapper[4779]: I0929 09:48:27.043523 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerStarted","Data":"4c56ffa6cad33de34ec06f2502ca20b3287f4ecc3db23245a0f665a33a3db443"} Sep 29 09:48:27 crc kubenswrapper[4779]: I0929 09:48:27.043566 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerStarted","Data":"063c04725d9702400807cf9bc08a7e659dc4aadc81922061b00bf5cae328734d"} Sep 29 09:48:27 crc kubenswrapper[4779]: I0929 09:48:27.043575 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerStarted","Data":"26c60eb886a81886a03f5eae5fe86216473255dd2d8924539fc395dc319c2a33"} Sep 29 09:48:28 crc kubenswrapper[4779]: I0929 09:48:28.057612 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerStarted","Data":"b67d0d04104a62c7e59b64734f81150e97dd4eebd53d2d6143eea4c2e81d9030"} Sep 29 09:48:31 crc kubenswrapper[4779]: I0929 09:48:31.090792 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerStarted","Data":"6601568f820f36b29e5ce19dc8d3c63a5bdf7625288e80c9c82ca6a710f46427"} Sep 29 09:48:31 crc kubenswrapper[4779]: I0929 09:48:31.140288 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.15300931 podStartE2EDuration="6.140254659s" podCreationTimestamp="2025-09-29 09:48:25 +0000 UTC" firstStartedPulling="2025-09-29 09:48:26.261422831 +0000 UTC m=+1138.242746735" lastFinishedPulling="2025-09-29 09:48:30.24866816 +0000 UTC m=+1142.229992084" observedRunningTime="2025-09-29 09:48:31.132126447 +0000 UTC m=+1143.113450421" watchObservedRunningTime="2025-09-29 09:48:31.140254659 +0000 UTC m=+1143.121578613" Sep 29 09:48:31 crc kubenswrapper[4779]: I0929 09:48:31.678477 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="c32af95a-c3b1-4cba-9c05-d5d787e3ec04" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.169:3000/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 09:48:32 crc kubenswrapper[4779]: I0929 09:48:32.101872 4779 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 09:48:37 crc kubenswrapper[4779]: I0929 09:48:37.150136 4779 generic.go:334] "Generic (PLEG): container finished" podID="d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" containerID="c8f3399cd4b08d51e0415f62410d0e9b9bd65963ad041a47277217410d338f13" exitCode=0 Sep 29 09:48:37 crc kubenswrapper[4779]: I0929 09:48:37.150673 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" event={"ID":"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0","Type":"ContainerDied","Data":"c8f3399cd4b08d51e0415f62410d0e9b9bd65963ad041a47277217410d338f13"} Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.574991 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.678985 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-combined-ca-bundle\") pod \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.679083 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kwr6\" (UniqueName: \"kubernetes.io/projected/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-kube-api-access-4kwr6\") pod \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.679330 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-scripts\") pod \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.680087 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-config-data\") pod \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\" (UID: \"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0\") " Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.684609 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-scripts" (OuterVolumeSpecName: "scripts") pod "d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" (UID: "d8fc4bcf-88f0-4a27-97e2-e9b452a630c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.684670 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-kube-api-access-4kwr6" (OuterVolumeSpecName: "kube-api-access-4kwr6") pod "d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" (UID: "d8fc4bcf-88f0-4a27-97e2-e9b452a630c0"). InnerVolumeSpecName "kube-api-access-4kwr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.704653 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" (UID: "d8fc4bcf-88f0-4a27-97e2-e9b452a630c0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.716263 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-config-data" (OuterVolumeSpecName: "config-data") pod "d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" (UID: "d8fc4bcf-88f0-4a27-97e2-e9b452a630c0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.734935 4779 scope.go:117] "RemoveContainer" containerID="04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3" Sep 29 09:48:38 crc kubenswrapper[4779]: E0929 09:48:38.735490 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(f67e636b-969b-48ee-bbec-3d8b38b22274)\"" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.782211 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.782244 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.782258 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:38 crc kubenswrapper[4779]: I0929 09:48:38.782271 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kwr6\" (UniqueName: \"kubernetes.io/projected/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0-kube-api-access-4kwr6\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.166818 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" event={"ID":"d8fc4bcf-88f0-4a27-97e2-e9b452a630c0","Type":"ContainerDied","Data":"19d712a00e48d22bcdc26064fea334d7b189989421aca98e2a28f5e33ed0d3ea"} Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.167105 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19d712a00e48d22bcdc26064fea334d7b189989421aca98e2a28f5e33ed0d3ea" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.166860 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n7dpw" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.254563 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 09:48:39 crc kubenswrapper[4779]: E0929 09:48:39.255053 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" containerName="nova-cell0-conductor-db-sync" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.255076 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" containerName="nova-cell0-conductor-db-sync" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.255334 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" containerName="nova-cell0-conductor-db-sync" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.256149 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.257888 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.258539 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-8dh5h" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.265236 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.395599 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.395730 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkb87\" (UniqueName: \"kubernetes.io/projected/a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e-kube-api-access-mkb87\") pod \"nova-cell0-conductor-0\" (UID: \"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.395795 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.497072 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkb87\" (UniqueName: \"kubernetes.io/projected/a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e-kube-api-access-mkb87\") pod \"nova-cell0-conductor-0\" (UID: \"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.497191 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 09:48:39 crc kubenswrapper[4779]: 
Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.501312 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.502763 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.513579 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkb87\" (UniqueName: \"kubernetes.io/projected/a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e-kube-api-access-mkb87\") pod \"nova-cell0-conductor-0\" (UID: \"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e\") " pod="openstack/nova-cell0-conductor-0"
Sep 29 09:48:39 crc kubenswrapper[4779]: I0929 09:48:39.606860 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Sep 29 09:48:40 crc kubenswrapper[4779]: I0929 09:48:40.056800 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Sep 29 09:48:40 crc kubenswrapper[4779]: W0929 09:48:40.062090 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4cc3c62_fa0a_4be5_b2e9_2ae9df18138e.slice/crio-9f0bb7b087a749a98fcde87c5d46ddbe13b136339f609db3887c5d54983a97d2 WatchSource:0}: Error finding container 9f0bb7b087a749a98fcde87c5d46ddbe13b136339f609db3887c5d54983a97d2: Status 404 returned error can't find the container with id 9f0bb7b087a749a98fcde87c5d46ddbe13b136339f609db3887c5d54983a97d2
Sep 29 09:48:40 crc kubenswrapper[4779]: I0929 09:48:40.179640 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e","Type":"ContainerStarted","Data":"9f0bb7b087a749a98fcde87c5d46ddbe13b136339f609db3887c5d54983a97d2"}
Sep 29 09:48:41 crc kubenswrapper[4779]: I0929 09:48:41.191702 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e","Type":"ContainerStarted","Data":"a1b37a8210accceca6c07964b31d0280e2102c076bf5405a6ab3b9f85961c069"}
Sep 29 09:48:41 crc kubenswrapper[4779]: I0929 09:48:41.191872 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Sep 29 09:48:41 crc kubenswrapper[4779]: I0929 09:48:41.216396 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.216379538 podStartE2EDuration="2.216379538s" podCreationTimestamp="2025-09-29 09:48:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:48:41.210230282 +0000 UTC m=+1153.191554196" watchObservedRunningTime="2025-09-29 09:48:41.216379538 +0000 UTC m=+1153.197703442"
Sep 29 09:48:49 crc kubenswrapper[4779]: I0929 09:48:49.644113 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.194602 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-p6zkw"]
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.196290 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p6zkw"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.198962 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.199197 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.212038 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-p6zkw"]
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.305939 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-config-data\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.306034 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.306062 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-scripts\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.306099 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4skp\" (UniqueName: \"kubernetes.io/projected/db25293b-28ae-40dc-a75f-86de34677919-kube-api-access-m4skp\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.367023 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.373495 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.379221 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.385150 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.406452 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.408036 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.408343 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-config-data\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.408439 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.408468 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-scripts\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.408514 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4skp\" (UniqueName: \"kubernetes.io/projected/db25293b-28ae-40dc-a75f-86de34677919-kube-api-access-m4skp\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.423215 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-scripts\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.423584 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.437712 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-config-data\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.438728 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 09:48:50 
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.514480 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/636b4304-ddba-47e8-95de-5549774d728f-logs\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.514530 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797a0428-592e-44f8-a1c4-78275dab99c7-logs\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.514593 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95m7x\" (UniqueName: \"kubernetes.io/projected/636b4304-ddba-47e8-95de-5549774d728f-kube-api-access-95m7x\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.514656 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-config-data\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.514709 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.514758 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-config-data\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.514793 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fghgs\" (UniqueName: \"kubernetes.io/projected/797a0428-592e-44f8-a1c4-78275dab99c7-kube-api-access-fghgs\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.515831 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4skp\" (UniqueName: \"kubernetes.io/projected/db25293b-28ae-40dc-a75f-86de34677919-kube-api-access-m4skp\") pod \"nova-cell0-cell-mapping-p6zkw\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " pod="openstack/nova-cell0-cell-mapping-p6zkw"
Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.535648 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p6zkw"
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.558944 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.612035 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.613288 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.622479 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637255 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95m7x\" (UniqueName: \"kubernetes.io/projected/636b4304-ddba-47e8-95de-5549774d728f-kube-api-access-95m7x\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637324 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-config-data\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637376 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637426 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-config-data\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637453 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fghgs\" (UniqueName: \"kubernetes.io/projected/797a0428-592e-44f8-a1c4-78275dab99c7-kube-api-access-fghgs\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637478 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637523 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/636b4304-ddba-47e8-95de-5549774d728f-logs\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637554 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797a0428-592e-44f8-a1c4-78275dab99c7-logs\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " 
pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.637929 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797a0428-592e-44f8-a1c4-78275dab99c7-logs\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.639014 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/636b4304-ddba-47e8-95de-5549774d728f-logs\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.642329 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-config-data\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.642344 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.648196 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-config-data\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.656760 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.660191 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.665556 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95m7x\" (UniqueName: \"kubernetes.io/projected/636b4304-ddba-47e8-95de-5549774d728f-kube-api-access-95m7x\") pod \"nova-metadata-0\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.667132 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fghgs\" (UniqueName: \"kubernetes.io/projected/797a0428-592e-44f8-a1c4-78275dab99c7-kube-api-access-fghgs\") pod \"nova-api-0\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.670676 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.672118 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.673997 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.677614 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c5b8df6f5-82kwj"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.679401 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.699246 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.709253 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.735840 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c5b8df6f5-82kwj"] Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740079 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740135 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-nb\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740173 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740199 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-config\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740241 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b2sr\" (UniqueName: \"kubernetes.io/projected/39e1d56f-d495-4b29-b2c7-409bf0e79550-kube-api-access-6b2sr\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740313 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-dns-svc\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740367 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740405 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-sb\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740457 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-config-data\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740483 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs264\" (UniqueName: \"kubernetes.io/projected/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-kube-api-access-bs264\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.740555 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-775sg\" (UniqueName: \"kubernetes.io/projected/2ad00777-208c-404d-bd36-c9b271156a3b-kube-api-access-775sg\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.841887 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-sb\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.841951 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-config-data\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.841985 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs264\" (UniqueName: \"kubernetes.io/projected/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-kube-api-access-bs264\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.842029 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-775sg\" (UniqueName: \"kubernetes.io/projected/2ad00777-208c-404d-bd36-c9b271156a3b-kube-api-access-775sg\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.842063 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.842095 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-nb\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.842115 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.842140 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-config\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.842178 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b2sr\" (UniqueName: \"kubernetes.io/projected/39e1d56f-d495-4b29-b2c7-409bf0e79550-kube-api-access-6b2sr\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.842214 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-dns-svc\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.842231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.854747 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-sb\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.854837 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-config\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.855306 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-nb\") 
pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.855321 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-dns-svc\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.856514 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.856887 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-config-data\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.857628 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.858418 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.863282 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs264\" (UniqueName: \"kubernetes.io/projected/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-kube-api-access-bs264\") pod \"nova-scheduler-0\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " pod="openstack/nova-scheduler-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.863525 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b2sr\" (UniqueName: \"kubernetes.io/projected/39e1d56f-d495-4b29-b2c7-409bf0e79550-kube-api-access-6b2sr\") pod \"nova-cell1-novncproxy-0\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.866327 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-775sg\" (UniqueName: \"kubernetes.io/projected/2ad00777-208c-404d-bd36-c9b271156a3b-kube-api-access-775sg\") pod \"dnsmasq-dns-6c5b8df6f5-82kwj\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.951042 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:48:50 crc kubenswrapper[4779]: I0929 09:48:50.972707 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.005451 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.013555 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.135331 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-p6zkw"] Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.239134 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6s6xf"] Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.240335 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.244554 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.244722 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.259005 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6s6xf"] Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.278387 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.300982 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"797a0428-592e-44f8-a1c4-78275dab99c7","Type":"ContainerStarted","Data":"55c638b952b4a27fbe233b1112f432943608a612c46fe616f8626be1456fb71b"} Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.302734 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p6zkw" event={"ID":"db25293b-28ae-40dc-a75f-86de34677919","Type":"ContainerStarted","Data":"3413861388b42ef044dc04599b22801568c92b4d1c1063f447923f51556e19ef"} Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.349432 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-config-data\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.350046 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-scripts\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.350078 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwzwb\" (UniqueName: \"kubernetes.io/projected/32f92928-6ba4-4824-94ac-8e20efe26e67-kube-api-access-fwzwb\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.350102 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.452476 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwzwb\" (UniqueName: \"kubernetes.io/projected/32f92928-6ba4-4824-94ac-8e20efe26e67-kube-api-access-fwzwb\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.452540 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.452581 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-config-data\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.452710 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-scripts\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.457586 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-config-data\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.457677 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-scripts\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.458560 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.471804 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwzwb\" (UniqueName: \"kubernetes.io/projected/32f92928-6ba4-4824-94ac-8e20efe26e67-kube-api-access-fwzwb\") pod \"nova-cell1-conductor-db-sync-6s6xf\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.580460 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.615716 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.631034 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.641506 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 09:48:51 crc kubenswrapper[4779]: W0929 09:48:51.666987 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c5de83c_d54a_44bc_8cee_f95541ef5a0a.slice/crio-68d4f15559d198864f90db775443eb89fed8be2b3aa54904aac67eaf93443a1d WatchSource:0}: Error finding container 68d4f15559d198864f90db775443eb89fed8be2b3aa54904aac67eaf93443a1d: Status 404 returned error can't find the container with id 68d4f15559d198864f90db775443eb89fed8be2b3aa54904aac67eaf93443a1d Sep 29 09:48:51 crc kubenswrapper[4779]: W0929 09:48:51.672325 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39e1d56f_d495_4b29_b2c7_409bf0e79550.slice/crio-d329346600d7b234112f24b8800d4986502a2755e03278255ce5edf349d7089c WatchSource:0}: Error finding container d329346600d7b234112f24b8800d4986502a2755e03278255ce5edf349d7089c: Status 404 returned error can't find the container with id d329346600d7b234112f24b8800d4986502a2755e03278255ce5edf349d7089c Sep 29 09:48:51 crc kubenswrapper[4779]: I0929 09:48:51.827532 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c5b8df6f5-82kwj"] Sep 29 09:48:52 crc kubenswrapper[4779]: W0929 09:48:52.065653 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32f92928_6ba4_4824_94ac_8e20efe26e67.slice/crio-1bb68dd99453afaf4d9d347e16661a17d05c238b71ea19e9287e13c98515127a WatchSource:0}: Error finding container 1bb68dd99453afaf4d9d347e16661a17d05c238b71ea19e9287e13c98515127a: Status 404 returned error can't find the container with id 1bb68dd99453afaf4d9d347e16661a17d05c238b71ea19e9287e13c98515127a Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.066695 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6s6xf"] Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.313863 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"636b4304-ddba-47e8-95de-5549774d728f","Type":"ContainerStarted","Data":"4e8a3519b87b2bb97857cc9647b497a8c923869747d674600c8b334a6b1da60e"} Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.317608 4779 generic.go:334] "Generic (PLEG): container finished" podID="2ad00777-208c-404d-bd36-c9b271156a3b" containerID="02e33785d22b991ef6c158db19b8e40437b7de2fa1dfea4c5084e90fd547ca99" exitCode=0 Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.317683 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" event={"ID":"2ad00777-208c-404d-bd36-c9b271156a3b","Type":"ContainerDied","Data":"02e33785d22b991ef6c158db19b8e40437b7de2fa1dfea4c5084e90fd547ca99"} Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.317715 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" 
event={"ID":"2ad00777-208c-404d-bd36-c9b271156a3b","Type":"ContainerStarted","Data":"0a343c4dba34dda5eda393cafdad180ac60ed02c43d26ef4585710882db9928c"} Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.323034 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p6zkw" event={"ID":"db25293b-28ae-40dc-a75f-86de34677919","Type":"ContainerStarted","Data":"42175e97ef30e091bd98a7f085d98538d528664c6d59cf720074da433264fe75"} Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.326334 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"39e1d56f-d495-4b29-b2c7-409bf0e79550","Type":"ContainerStarted","Data":"d329346600d7b234112f24b8800d4986502a2755e03278255ce5edf349d7089c"} Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.331212 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7c5de83c-d54a-44bc-8cee-f95541ef5a0a","Type":"ContainerStarted","Data":"68d4f15559d198864f90db775443eb89fed8be2b3aa54904aac67eaf93443a1d"} Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.339522 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" event={"ID":"32f92928-6ba4-4824-94ac-8e20efe26e67","Type":"ContainerStarted","Data":"8b8b9cc7488074b98f47def0550430ef1ae160698979702264ad74e926ac6fe0"} Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.339596 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" event={"ID":"32f92928-6ba4-4824-94ac-8e20efe26e67","Type":"ContainerStarted","Data":"1bb68dd99453afaf4d9d347e16661a17d05c238b71ea19e9287e13c98515127a"} Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.388279 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-p6zkw" podStartSLOduration=2.388212807 podStartE2EDuration="2.388212807s" podCreationTimestamp="2025-09-29 09:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:48:52.371221891 +0000 UTC m=+1164.352545795" watchObservedRunningTime="2025-09-29 09:48:52.388212807 +0000 UTC m=+1164.369536721" Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.420977 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" podStartSLOduration=1.420955373 podStartE2EDuration="1.420955373s" podCreationTimestamp="2025-09-29 09:48:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:48:52.397522723 +0000 UTC m=+1164.378846627" watchObservedRunningTime="2025-09-29 09:48:52.420955373 +0000 UTC m=+1164.402279277" Sep 29 09:48:52 crc kubenswrapper[4779]: I0929 09:48:52.715849 4779 scope.go:117] "RemoveContainer" containerID="04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3" Sep 29 09:48:54 crc kubenswrapper[4779]: I0929 09:48:54.388984 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 09:48:54 crc kubenswrapper[4779]: I0929 09:48:54.401713 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.389137 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" 
event={"ID":"2ad00777-208c-404d-bd36-c9b271156a3b","Type":"ContainerStarted","Data":"fddeffb5978318ff4a7ea04690b0e1223843c5207c79250f72bf88f3f6593ef7"} Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.389680 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.391359 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"797a0428-592e-44f8-a1c4-78275dab99c7","Type":"ContainerStarted","Data":"7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c"} Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.391408 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"797a0428-592e-44f8-a1c4-78275dab99c7","Type":"ContainerStarted","Data":"9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a"} Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.393459 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerStarted","Data":"3eb7e06311b67b9128c6c006dc1d7ed6d1f20a00ed64c27b5f97404d4ecbb462"} Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.394854 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7c5de83c-d54a-44bc-8cee-f95541ef5a0a","Type":"ContainerStarted","Data":"fce0e087c6951450a195fef03c79845bd12864e723ebac5002d1730b5c768669"} Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.397405 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"39e1d56f-d495-4b29-b2c7-409bf0e79550","Type":"ContainerStarted","Data":"ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369"} Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.397515 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="39e1d56f-d495-4b29-b2c7-409bf0e79550" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369" gracePeriod=30 Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.403973 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"636b4304-ddba-47e8-95de-5549774d728f","Type":"ContainerStarted","Data":"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33"} Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.404014 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"636b4304-ddba-47e8-95de-5549774d728f","Type":"ContainerStarted","Data":"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb"} Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.404241 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="636b4304-ddba-47e8-95de-5549774d728f" containerName="nova-metadata-log" containerID="cri-o://3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb" gracePeriod=30 Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.404266 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="636b4304-ddba-47e8-95de-5549774d728f" containerName="nova-metadata-metadata" containerID="cri-o://3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33" gracePeriod=30 Sep 29 09:48:55 
crc kubenswrapper[4779]: I0929 09:48:55.427126 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" podStartSLOduration=5.427093432 podStartE2EDuration="5.427093432s" podCreationTimestamp="2025-09-29 09:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:48:55.42143472 +0000 UTC m=+1167.402758624" watchObservedRunningTime="2025-09-29 09:48:55.427093432 +0000 UTC m=+1167.408417336" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.453371 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.495427735 podStartE2EDuration="5.453354074s" podCreationTimestamp="2025-09-29 09:48:50 +0000 UTC" firstStartedPulling="2025-09-29 09:48:51.285059675 +0000 UTC m=+1163.266383579" lastFinishedPulling="2025-09-29 09:48:54.242986004 +0000 UTC m=+1166.224309918" observedRunningTime="2025-09-29 09:48:55.446376774 +0000 UTC m=+1167.427700678" watchObservedRunningTime="2025-09-29 09:48:55.453354074 +0000 UTC m=+1167.434677978" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.465914 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.8952432420000003 podStartE2EDuration="5.465883392s" podCreationTimestamp="2025-09-29 09:48:50 +0000 UTC" firstStartedPulling="2025-09-29 09:48:51.672391746 +0000 UTC m=+1163.653715650" lastFinishedPulling="2025-09-29 09:48:54.243031876 +0000 UTC m=+1166.224355800" observedRunningTime="2025-09-29 09:48:55.46092246 +0000 UTC m=+1167.442246364" watchObservedRunningTime="2025-09-29 09:48:55.465883392 +0000 UTC m=+1167.447207306" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.507872 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.924789758 podStartE2EDuration="5.507856413s" podCreationTimestamp="2025-09-29 09:48:50 +0000 UTC" firstStartedPulling="2025-09-29 09:48:51.659805256 +0000 UTC m=+1163.641129160" lastFinishedPulling="2025-09-29 09:48:54.242871881 +0000 UTC m=+1166.224195815" observedRunningTime="2025-09-29 09:48:55.503430876 +0000 UTC m=+1167.484754780" watchObservedRunningTime="2025-09-29 09:48:55.507856413 +0000 UTC m=+1167.489180317" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.553109 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.984856996 podStartE2EDuration="5.553092057s" podCreationTimestamp="2025-09-29 09:48:50 +0000 UTC" firstStartedPulling="2025-09-29 09:48:51.675653359 +0000 UTC m=+1163.656977273" lastFinishedPulling="2025-09-29 09:48:54.24388839 +0000 UTC m=+1166.225212334" observedRunningTime="2025-09-29 09:48:55.545630894 +0000 UTC m=+1167.526954798" watchObservedRunningTime="2025-09-29 09:48:55.553092057 +0000 UTC m=+1167.534415961" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.764456 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.919479 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.967412 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95m7x\" (UniqueName: \"kubernetes.io/projected/636b4304-ddba-47e8-95de-5549774d728f-kube-api-access-95m7x\") pod \"636b4304-ddba-47e8-95de-5549774d728f\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.967586 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-combined-ca-bundle\") pod \"636b4304-ddba-47e8-95de-5549774d728f\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.967627 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-config-data\") pod \"636b4304-ddba-47e8-95de-5549774d728f\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.967669 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/636b4304-ddba-47e8-95de-5549774d728f-logs\") pod \"636b4304-ddba-47e8-95de-5549774d728f\" (UID: \"636b4304-ddba-47e8-95de-5549774d728f\") " Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.968031 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/636b4304-ddba-47e8-95de-5549774d728f-logs" (OuterVolumeSpecName: "logs") pod "636b4304-ddba-47e8-95de-5549774d728f" (UID: "636b4304-ddba-47e8-95de-5549774d728f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.968256 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/636b4304-ddba-47e8-95de-5549774d728f-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.973761 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 09:48:55 crc kubenswrapper[4779]: I0929 09:48:55.976024 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/636b4304-ddba-47e8-95de-5549774d728f-kube-api-access-95m7x" (OuterVolumeSpecName: "kube-api-access-95m7x") pod "636b4304-ddba-47e8-95de-5549774d728f" (UID: "636b4304-ddba-47e8-95de-5549774d728f"). InnerVolumeSpecName "kube-api-access-95m7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.001555 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "636b4304-ddba-47e8-95de-5549774d728f" (UID: "636b4304-ddba-47e8-95de-5549774d728f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.001988 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-config-data" (OuterVolumeSpecName: "config-data") pod "636b4304-ddba-47e8-95de-5549774d728f" (UID: "636b4304-ddba-47e8-95de-5549774d728f"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.006762 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.069674 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95m7x\" (UniqueName: \"kubernetes.io/projected/636b4304-ddba-47e8-95de-5549774d728f-kube-api-access-95m7x\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.069700 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.069711 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636b4304-ddba-47e8-95de-5549774d728f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.422485 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.426029 4779 generic.go:334] "Generic (PLEG): container finished" podID="636b4304-ddba-47e8-95de-5549774d728f" containerID="3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33" exitCode=0 Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.426080 4779 generic.go:334] "Generic (PLEG): container finished" podID="636b4304-ddba-47e8-95de-5549774d728f" containerID="3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb" exitCode=143 Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.426221 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"636b4304-ddba-47e8-95de-5549774d728f","Type":"ContainerDied","Data":"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33"} Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.426286 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"636b4304-ddba-47e8-95de-5549774d728f","Type":"ContainerDied","Data":"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb"} Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.426307 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"636b4304-ddba-47e8-95de-5549774d728f","Type":"ContainerDied","Data":"4e8a3519b87b2bb97857cc9647b497a8c923869747d674600c8b334a6b1da60e"} Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.426336 4779 scope.go:117] "RemoveContainer" containerID="3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.465513 4779 scope.go:117] "RemoveContainer" containerID="3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.468983 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.478061 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.495077 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:56 crc kubenswrapper[4779]: E0929 09:48:56.495987 4779 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="636b4304-ddba-47e8-95de-5549774d728f" containerName="nova-metadata-log" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.496027 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="636b4304-ddba-47e8-95de-5549774d728f" containerName="nova-metadata-log" Sep 29 09:48:56 crc kubenswrapper[4779]: E0929 09:48:56.496086 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="636b4304-ddba-47e8-95de-5549774d728f" containerName="nova-metadata-metadata" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.496096 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="636b4304-ddba-47e8-95de-5549774d728f" containerName="nova-metadata-metadata" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.496435 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="636b4304-ddba-47e8-95de-5549774d728f" containerName="nova-metadata-log" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.496452 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="636b4304-ddba-47e8-95de-5549774d728f" containerName="nova-metadata-metadata" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.497429 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.502197 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.502656 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.508547 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.574497 4779 scope.go:117] "RemoveContainer" containerID="3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33" Sep 29 09:48:56 crc kubenswrapper[4779]: E0929 09:48:56.576573 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33\": container with ID starting with 3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33 not found: ID does not exist" containerID="3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.576617 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33"} err="failed to get container status \"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33\": rpc error: code = NotFound desc = could not find container \"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33\": container with ID starting with 3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33 not found: ID does not exist" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.576644 4779 scope.go:117] "RemoveContainer" containerID="3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb" Sep 29 09:48:56 crc kubenswrapper[4779]: E0929 09:48:56.576932 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb\": container with ID starting with 
3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb not found: ID does not exist" containerID="3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.576957 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb"} err="failed to get container status \"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb\": rpc error: code = NotFound desc = could not find container \"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb\": container with ID starting with 3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb not found: ID does not exist" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.576971 4779 scope.go:117] "RemoveContainer" containerID="3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.577162 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33"} err="failed to get container status \"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33\": rpc error: code = NotFound desc = could not find container \"3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33\": container with ID starting with 3176e4f5fc8327a584ae3e110b00b719167f1f62a10c7dd56b5e169bb9ba9c33 not found: ID does not exist" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.577180 4779 scope.go:117] "RemoveContainer" containerID="3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.577358 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb"} err="failed to get container status \"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb\": rpc error: code = NotFound desc = could not find container \"3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb\": container with ID starting with 3a2216f5b11e108624c78c37407bf869028c345eaa9dfdb1abebcab9ab3fcdcb not found: ID does not exist" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.585645 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrdrm\" (UniqueName: \"kubernetes.io/projected/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-kube-api-access-mrdrm\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.585964 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-logs\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.586072 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.586184 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.586352 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-config-data\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.688820 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-logs\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.688916 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.688985 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.689093 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-config-data\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.689152 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrdrm\" (UniqueName: \"kubernetes.io/projected/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-kube-api-access-mrdrm\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.689311 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-logs\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.705489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.707091 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " 
pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.707493 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-config-data\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.709401 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrdrm\" (UniqueName: \"kubernetes.io/projected/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-kube-api-access-mrdrm\") pod \"nova-metadata-0\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " pod="openstack/nova-metadata-0" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.727821 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="636b4304-ddba-47e8-95de-5549774d728f" path="/var/lib/kubelet/pods/636b4304-ddba-47e8-95de-5549774d728f/volumes" Sep 29 09:48:56 crc kubenswrapper[4779]: I0929 09:48:56.874515 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:48:57 crc kubenswrapper[4779]: W0929 09:48:57.359077 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod498f7b6b_1eee_4b9a_b987_2ebdb93d0d11.slice/crio-29dd4b2693581af23760e49a941dd43864e434499e913612ac5de8f15b82a20d WatchSource:0}: Error finding container 29dd4b2693581af23760e49a941dd43864e434499e913612ac5de8f15b82a20d: Status 404 returned error can't find the container with id 29dd4b2693581af23760e49a941dd43864e434499e913612ac5de8f15b82a20d Sep 29 09:48:57 crc kubenswrapper[4779]: I0929 09:48:57.363267 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:48:57 crc kubenswrapper[4779]: I0929 09:48:57.444491 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11","Type":"ContainerStarted","Data":"29dd4b2693581af23760e49a941dd43864e434499e913612ac5de8f15b82a20d"} Sep 29 09:48:58 crc kubenswrapper[4779]: I0929 09:48:58.474519 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11","Type":"ContainerStarted","Data":"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e"} Sep 29 09:48:58 crc kubenswrapper[4779]: I0929 09:48:58.475098 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11","Type":"ContainerStarted","Data":"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330"} Sep 29 09:48:58 crc kubenswrapper[4779]: I0929 09:48:58.499941 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.49992335 podStartE2EDuration="2.49992335s" podCreationTimestamp="2025-09-29 09:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:48:58.49398796 +0000 UTC m=+1170.475311884" watchObservedRunningTime="2025-09-29 09:48:58.49992335 +0000 UTC m=+1170.481247254" Sep 29 09:48:58 crc kubenswrapper[4779]: I0929 09:48:58.754713 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 09:48:58 crc kubenswrapper[4779]: I0929 09:48:58.755135 4779 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="eb6a2725-2f24-4e1f-9791-d544f59eddeb" containerName="kube-state-metrics" containerID="cri-o://a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a" gracePeriod=30 Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.345599 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.442721 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nj2p7\" (UniqueName: \"kubernetes.io/projected/eb6a2725-2f24-4e1f-9791-d544f59eddeb-kube-api-access-nj2p7\") pod \"eb6a2725-2f24-4e1f-9791-d544f59eddeb\" (UID: \"eb6a2725-2f24-4e1f-9791-d544f59eddeb\") " Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.451356 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb6a2725-2f24-4e1f-9791-d544f59eddeb-kube-api-access-nj2p7" (OuterVolumeSpecName: "kube-api-access-nj2p7") pod "eb6a2725-2f24-4e1f-9791-d544f59eddeb" (UID: "eb6a2725-2f24-4e1f-9791-d544f59eddeb"). InnerVolumeSpecName "kube-api-access-nj2p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.486259 4779 generic.go:334] "Generic (PLEG): container finished" podID="eb6a2725-2f24-4e1f-9791-d544f59eddeb" containerID="a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a" exitCode=2 Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.486324 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.486363 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"eb6a2725-2f24-4e1f-9791-d544f59eddeb","Type":"ContainerDied","Data":"a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a"} Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.486424 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"eb6a2725-2f24-4e1f-9791-d544f59eddeb","Type":"ContainerDied","Data":"9459d52f100ff40f181d7bd7b0be17c8db292f239c2fe274eb5981d423b4d7ef"} Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.486449 4779 scope.go:117] "RemoveContainer" containerID="a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.530094 4779 scope.go:117] "RemoveContainer" containerID="a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a" Sep 29 09:48:59 crc kubenswrapper[4779]: E0929 09:48:59.530753 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a\": container with ID starting with a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a not found: ID does not exist" containerID="a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.530780 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a"} err="failed to get container status \"a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a\": rpc error: code = NotFound desc = could not 
find container \"a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a\": container with ID starting with a1341959cf67b778d67e535ec3749d70826d4a07b43880fc7d9a61c1370c671a not found: ID does not exist" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.545028 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nj2p7\" (UniqueName: \"kubernetes.io/projected/eb6a2725-2f24-4e1f-9791-d544f59eddeb-kube-api-access-nj2p7\") on node \"crc\" DevicePath \"\"" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.546948 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.552163 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.587749 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 09:48:59 crc kubenswrapper[4779]: E0929 09:48:59.588179 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb6a2725-2f24-4e1f-9791-d544f59eddeb" containerName="kube-state-metrics" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.588195 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb6a2725-2f24-4e1f-9791-d544f59eddeb" containerName="kube-state-metrics" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.588355 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb6a2725-2f24-4e1f-9791-d544f59eddeb" containerName="kube-state-metrics" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.589001 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.593095 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.602597 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.604037 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.749754 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.749806 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.749840 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0" Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.750179 4779 
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.851977 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v88gl\" (UniqueName: \"kubernetes.io/projected/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-kube-api-access-v88gl\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.852116 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.852142 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.852173 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.855655 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.855810 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.865653 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.875055 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v88gl\" (UniqueName: \"kubernetes.io/projected/0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea-kube-api-access-v88gl\") pod \"kube-state-metrics-0\" (UID: \"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea\") " pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.904295 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.946275 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.946515 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="ceilometer-central-agent" containerID="cri-o://063c04725d9702400807cf9bc08a7e659dc4aadc81922061b00bf5cae328734d" gracePeriod=30
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.946944 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="proxy-httpd" containerID="cri-o://6601568f820f36b29e5ce19dc8d3c63a5bdf7625288e80c9c82ca6a710f46427" gracePeriod=30
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.946992 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="sg-core" containerID="cri-o://b67d0d04104a62c7e59b64734f81150e97dd4eebd53d2d6143eea4c2e81d9030" gracePeriod=30
Sep 29 09:48:59 crc kubenswrapper[4779]: I0929 09:48:59.947026 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="ceilometer-notification-agent" containerID="cri-o://4c56ffa6cad33de34ec06f2502ca20b3287f4ecc3db23245a0f665a33a3db443" gracePeriod=30
Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.400664 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 09:49:00 crc kubenswrapper[4779]: W0929 09:49:00.401568 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0064d445_dfc2_4f0f_b4ae_26ebc2a1d3ea.slice/crio-965b626f4262dd282778901431a73a01fe2b8f0b52ecd9b92320a124bdb61e0c WatchSource:0}: Error finding container 965b626f4262dd282778901431a73a01fe2b8f0b52ecd9b92320a124bdb61e0c: Status 404 returned error can't find the container with id 965b626f4262dd282778901431a73a01fe2b8f0b52ecd9b92320a124bdb61e0c
Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.500037 4779 generic.go:334] "Generic (PLEG): container finished" podID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerID="6601568f820f36b29e5ce19dc8d3c63a5bdf7625288e80c9c82ca6a710f46427" exitCode=0
Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.500060 4779 generic.go:334] "Generic (PLEG): container finished" podID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerID="b67d0d04104a62c7e59b64734f81150e97dd4eebd53d2d6143eea4c2e81d9030" exitCode=2
Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.500067 4779 generic.go:334] "Generic (PLEG): container finished" podID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerID="063c04725d9702400807cf9bc08a7e659dc4aadc81922061b00bf5cae328734d" exitCode=0
Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.500102 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerDied","Data":"6601568f820f36b29e5ce19dc8d3c63a5bdf7625288e80c9c82ca6a710f46427"}
Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.500123 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerDied","Data":"b67d0d04104a62c7e59b64734f81150e97dd4eebd53d2d6143eea4c2e81d9030"}
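[annotation] The "Killing container with a grace period ... gracePeriod=30" entries above are the kubelet honoring the grace period attached to the API-side delete. A minimal client-go sketch of issuing such a delete, assuming in-cluster credentials; the namespace and pod name are taken from the log, everything else is illustrative:

```go
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes this runs inside the cluster
	if err != nil {
		panic(err)
	}
	clientset, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// 30s matches the gracePeriod the kubelet reports above; when unset, the
	// pod's terminationGracePeriodSeconds applies instead.
	grace := int64(30)
	err = clientset.CoreV1().Pods("openstack").Delete(context.TODO(),
		"ceilometer-0", metav1.DeleteOptions{GracePeriodSeconds: &grace})
	fmt.Println("delete requested:", err)
}
```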
event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerDied","Data":"b67d0d04104a62c7e59b64734f81150e97dd4eebd53d2d6143eea4c2e81d9030"} Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.500135 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerDied","Data":"063c04725d9702400807cf9bc08a7e659dc4aadc81922061b00bf5cae328734d"} Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.503360 4779 generic.go:334] "Generic (PLEG): container finished" podID="db25293b-28ae-40dc-a75f-86de34677919" containerID="42175e97ef30e091bd98a7f085d98538d528664c6d59cf720074da433264fe75" exitCode=0 Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.503487 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p6zkw" event={"ID":"db25293b-28ae-40dc-a75f-86de34677919","Type":"ContainerDied","Data":"42175e97ef30e091bd98a7f085d98538d528664c6d59cf720074da433264fe75"} Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.505133 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea","Type":"ContainerStarted","Data":"965b626f4262dd282778901431a73a01fe2b8f0b52ecd9b92320a124bdb61e0c"} Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.710179 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.710240 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.728965 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb6a2725-2f24-4e1f-9791-d544f59eddeb" path="/var/lib/kubelet/pods/eb6a2725-2f24-4e1f-9791-d544f59eddeb/volumes" Sep 29 09:49:00 crc kubenswrapper[4779]: I0929 09:49:00.974218 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.008742 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.015062 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.149218 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77d9688d49-649qz"] Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.150031 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77d9688d49-649qz" podUID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" containerName="dnsmasq-dns" containerID="cri-o://8dc7293ff6c7eb5e144eaa1c94f807f49d8892149ac27496adbd77f5afa9c592" gracePeriod=10 Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.526034 4779 generic.go:334] "Generic (PLEG): container finished" podID="32f92928-6ba4-4824-94ac-8e20efe26e67" containerID="8b8b9cc7488074b98f47def0550430ef1ae160698979702264ad74e926ac6fe0" exitCode=0 Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.526149 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" event={"ID":"32f92928-6ba4-4824-94ac-8e20efe26e67","Type":"ContainerDied","Data":"8b8b9cc7488074b98f47def0550430ef1ae160698979702264ad74e926ac6fe0"} Sep 29 09:49:01 crc 
kubenswrapper[4779]: I0929 09:49:01.531339 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea","Type":"ContainerStarted","Data":"1fe97647529f8cab7161860859c27f98b76ed4c573a74abf5836ee774cc7d5be"} Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.532449 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.537465 4779 generic.go:334] "Generic (PLEG): container finished" podID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" containerID="8dc7293ff6c7eb5e144eaa1c94f807f49d8892149ac27496adbd77f5afa9c592" exitCode=0 Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.537531 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d9688d49-649qz" event={"ID":"ab1c8bfe-eb10-4813-87dc-0df1ea736205","Type":"ContainerDied","Data":"8dc7293ff6c7eb5e144eaa1c94f807f49d8892149ac27496adbd77f5afa9c592"} Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.540389 4779 generic.go:334] "Generic (PLEG): container finished" podID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerID="4c56ffa6cad33de34ec06f2502ca20b3287f4ecc3db23245a0f665a33a3db443" exitCode=0 Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.540717 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerDied","Data":"4c56ffa6cad33de34ec06f2502ca20b3287f4ecc3db23245a0f665a33a3db443"} Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.586732 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.179150195 podStartE2EDuration="2.586701556s" podCreationTimestamp="2025-09-29 09:48:59 +0000 UTC" firstStartedPulling="2025-09-29 09:49:00.403919055 +0000 UTC m=+1172.385242959" lastFinishedPulling="2025-09-29 09:49:00.811470416 +0000 UTC m=+1172.792794320" observedRunningTime="2025-09-29 09:49:01.564194842 +0000 UTC m=+1173.545518766" watchObservedRunningTime="2025-09-29 09:49:01.586701556 +0000 UTC m=+1173.568025450" Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.596971 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.660830 4779 util.go:48] "No ready sandbox for pod can be found. 
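[annotation] The "SyncLoop (PLEG)" entries above are kubelet-internal, but the same ContainerDied/ContainerStarted transitions surface to API clients as pod status updates. A hedged sketch of observing them with a client-go watch; the kubeconfig path, namespace, and pod name are assumptions for illustration:

```go
package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	clientset, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	w, err := clientset.CoreV1().Pods("openstack").Watch(context.TODO(),
		metav1.ListOptions{FieldSelector: "metadata.name=kube-state-metrics-0"})
	if err != nil {
		panic(err)
	}
	for ev := range w.ResultChan() {
		pod, ok := ev.Object.(*corev1.Pod)
		if !ok {
			continue
		}
		for _, st := range pod.Status.ContainerStatuses {
			// Terminated.ExitCode is where the exitCode=2 above becomes visible.
			if t := st.State.Terminated; t != nil {
				fmt.Printf("%s terminated, exit=%d\n", st.Name, t.ExitCode)
			}
		}
	}
}
```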
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.792133 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.792290 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.805572 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-sb\") pod \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") "
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.805675 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-config\") pod \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") "
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.805717 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smd7n\" (UniqueName: \"kubernetes.io/projected/ab1c8bfe-eb10-4813-87dc-0df1ea736205-kube-api-access-smd7n\") pod \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") "
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.805879 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-nb\") pod \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") "
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.805928 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-dns-svc\") pod \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\" (UID: \"ab1c8bfe-eb10-4813-87dc-0df1ea736205\") "
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.813074 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab1c8bfe-eb10-4813-87dc-0df1ea736205-kube-api-access-smd7n" (OuterVolumeSpecName: "kube-api-access-smd7n") pod "ab1c8bfe-eb10-4813-87dc-0df1ea736205" (UID: "ab1c8bfe-eb10-4813-87dc-0df1ea736205"). InnerVolumeSpecName "kube-api-access-smd7n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.842306 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.874821 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.875809 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.891318 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab1c8bfe-eb10-4813-87dc-0df1ea736205" (UID: "ab1c8bfe-eb10-4813-87dc-0df1ea736205"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.892518 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab1c8bfe-eb10-4813-87dc-0df1ea736205" (UID: "ab1c8bfe-eb10-4813-87dc-0df1ea736205"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.892697 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-config" (OuterVolumeSpecName: "config") pod "ab1c8bfe-eb10-4813-87dc-0df1ea736205" (UID: "ab1c8bfe-eb10-4813-87dc-0df1ea736205"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.908660 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.908706 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.908719 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-config\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.908731 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smd7n\" (UniqueName: \"kubernetes.io/projected/ab1c8bfe-eb10-4813-87dc-0df1ea736205-kube-api-access-smd7n\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:01 crc kubenswrapper[4779]: I0929 09:49:01.923337 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab1c8bfe-eb10-4813-87dc-0df1ea736205" (UID: "ab1c8bfe-eb10-4813-87dc-0df1ea736205"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
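[annotation] The nova-api "Probe failed ... Client.Timeout exceeded" entries above come from an HTTP startup probe against :8774 timing out. A hedged sketch of such a probe using the Kubernetes API types; the numeric thresholds are illustrative assumptions, not values read from the real manifest:

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path: "/",
				Port: intstr.FromInt(8774), // nova-api port from the probe URL above
			},
		},
		TimeoutSeconds:   5,  // "context deadline exceeded" means this elapsed
		PeriodSeconds:    10, // retried each period until...
		FailureThreshold: 30, // ...this many failures mark startup as failed
	}
	fmt.Printf("%+v\n", probe)
}
```

A startup probe gates the liveness/readiness probes, which is why the log later flips from status="unhealthy" to status="started" once one attempt succeeds.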
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.009603 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-scripts\") pod \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.009695 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-sg-core-conf-yaml\") pod \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.009775 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-log-httpd\") pod \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.009860 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-combined-ca-bundle\") pod \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.009918 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6dpk\" (UniqueName: \"kubernetes.io/projected/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-kube-api-access-q6dpk\") pod \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.009941 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-config-data\") pod \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.009963 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-run-httpd\") pod \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\" (UID: \"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.010506 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab1c8bfe-eb10-4813-87dc-0df1ea736205-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.011330 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" (UID: "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.011863 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" (UID: "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.014605 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-scripts" (OuterVolumeSpecName: "scripts") pod "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" (UID: "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.016200 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-kube-api-access-q6dpk" (OuterVolumeSpecName: "kube-api-access-q6dpk") pod "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" (UID: "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89"). InnerVolumeSpecName "kube-api-access-q6dpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.064178 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" (UID: "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.113192 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6dpk\" (UniqueName: \"kubernetes.io/projected/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-kube-api-access-q6dpk\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.113218 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.113228 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.113237 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.113244 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.120027 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-config-data" (OuterVolumeSpecName: "config-data") pod "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" (UID: "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.130122 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" (UID: "7e9c6ab7-50a1-49d3-a6df-71646c0e9a89"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.158172 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p6zkw" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.214990 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.215027 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.316752 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4skp\" (UniqueName: \"kubernetes.io/projected/db25293b-28ae-40dc-a75f-86de34677919-kube-api-access-m4skp\") pod \"db25293b-28ae-40dc-a75f-86de34677919\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.317121 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-combined-ca-bundle\") pod \"db25293b-28ae-40dc-a75f-86de34677919\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.317314 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-config-data\") pod \"db25293b-28ae-40dc-a75f-86de34677919\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.317416 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-scripts\") pod \"db25293b-28ae-40dc-a75f-86de34677919\" (UID: \"db25293b-28ae-40dc-a75f-86de34677919\") " Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.320352 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-scripts" (OuterVolumeSpecName: "scripts") pod "db25293b-28ae-40dc-a75f-86de34677919" (UID: "db25293b-28ae-40dc-a75f-86de34677919"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.324080 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db25293b-28ae-40dc-a75f-86de34677919-kube-api-access-m4skp" (OuterVolumeSpecName: "kube-api-access-m4skp") pod "db25293b-28ae-40dc-a75f-86de34677919" (UID: "db25293b-28ae-40dc-a75f-86de34677919"). InnerVolumeSpecName "kube-api-access-m4skp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.356421 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db25293b-28ae-40dc-a75f-86de34677919" (UID: "db25293b-28ae-40dc-a75f-86de34677919"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.356434 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-config-data" (OuterVolumeSpecName: "config-data") pod "db25293b-28ae-40dc-a75f-86de34677919" (UID: "db25293b-28ae-40dc-a75f-86de34677919"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.420369 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.420411 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.420422 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db25293b-28ae-40dc-a75f-86de34677919-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.420432 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4skp\" (UniqueName: \"kubernetes.io/projected/db25293b-28ae-40dc-a75f-86de34677919-kube-api-access-m4skp\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.552215 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77d9688d49-649qz" event={"ID":"ab1c8bfe-eb10-4813-87dc-0df1ea736205","Type":"ContainerDied","Data":"2978623c6571cde712c0edb24a0d6df1d39db6eb2d22a744dfaf8785b1808dc6"} Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.552277 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77d9688d49-649qz" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.553079 4779 scope.go:117] "RemoveContainer" containerID="8dc7293ff6c7eb5e144eaa1c94f807f49d8892149ac27496adbd77f5afa9c592" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.564304 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.564597 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e9c6ab7-50a1-49d3-a6df-71646c0e9a89","Type":"ContainerDied","Data":"26c60eb886a81886a03f5eae5fe86216473255dd2d8924539fc395dc319c2a33"} Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.572163 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p6zkw" event={"ID":"db25293b-28ae-40dc-a75f-86de34677919","Type":"ContainerDied","Data":"3413861388b42ef044dc04599b22801568c92b4d1c1063f447923f51556e19ef"} Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.572196 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3413861388b42ef044dc04599b22801568c92b4d1c1063f447923f51556e19ef" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.572172 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.588692 4779 scope.go:117] "RemoveContainer" containerID="211259e34db11dd31d7e7753dca18f0ebf98d95f5551ab977a8a8322c6a848ec"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.622808 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.647058 4779 scope.go:117] "RemoveContainer" containerID="6601568f820f36b29e5ce19dc8d3c63a5bdf7625288e80c9c82ca6a710f46427"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.675956 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.693041 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77d9688d49-649qz"]
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.698323 4779 scope.go:117] "RemoveContainer" containerID="b67d0d04104a62c7e59b64734f81150e97dd4eebd53d2d6143eea4c2e81d9030"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.734537 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" path="/var/lib/kubelet/pods/7e9c6ab7-50a1-49d3-a6df-71646c0e9a89/volumes"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.735503 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77d9688d49-649qz"]
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.735600 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 09:49:02 crc kubenswrapper[4779]: E0929 09:49:02.735968 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db25293b-28ae-40dc-a75f-86de34677919" containerName="nova-manage"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.736084 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="db25293b-28ae-40dc-a75f-86de34677919" containerName="nova-manage"
Sep 29 09:49:02 crc kubenswrapper[4779]: E0929 09:49:02.736154 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" containerName="init"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.736213 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" containerName="init"
Sep 29 09:49:02 crc kubenswrapper[4779]: E0929 09:49:02.736280 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" containerName="dnsmasq-dns"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.736329 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" containerName="dnsmasq-dns"
Sep 29 09:49:02 crc kubenswrapper[4779]: E0929 09:49:02.736383 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="sg-core"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.736433 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="sg-core"
Sep 29 09:49:02 crc kubenswrapper[4779]: E0929 09:49:02.736487 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="proxy-httpd"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.736537 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="proxy-httpd"
Sep 29 09:49:02 crc kubenswrapper[4779]: E0929 09:49:02.736615 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="ceilometer-notification-agent"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.736701 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="ceilometer-notification-agent"
Sep 29 09:49:02 crc kubenswrapper[4779]: E0929 09:49:02.736781 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="ceilometer-central-agent"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.736837 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="ceilometer-central-agent"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.737415 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="ceilometer-central-agent"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.737501 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="sg-core"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.737568 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="db25293b-28ae-40dc-a75f-86de34677919" containerName="nova-manage"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.737642 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="proxy-httpd"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.737737 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" containerName="dnsmasq-dns"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.737823 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9c6ab7-50a1-49d3-a6df-71646c0e9a89" containerName="ceilometer-notification-agent"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.741087 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
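[annotation] The cpu_manager/state_mem/memory_manager "RemoveStaleState" entries above come from the kubelet's CPU and memory managers dropping per-container state for pods that no longer exist. A hedged sketch of where those managers are configured; the policy values are illustrative defaults, not read from this node:

```go
package main

import (
	"fmt"

	kubeletconfig "k8s.io/kubelet/config/v1beta1"
)

func main() {
	cfg := kubeletconfig.KubeletConfiguration{
		// "static" pins exclusive CPUs per Guaranteed container and is what
		// makes the CPUSet assignments seen in state_mem.go worth tracking.
		CPUManagerPolicy: "none",
		// "Static" enables NUMA-aware memory guarantees.
		MemoryManagerPolicy: "None",
	}
	fmt.Printf("cpuManagerPolicy=%s memoryManagerPolicy=%s\n",
		cfg.CPUManagerPolicy, cfg.MemoryManagerPolicy)
}
```

Even with the "none" policy the managers keep a state file, which is why stale entries are scrubbed whenever a replacement pod with the same name is admitted.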
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.748074 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.752757 4779 scope.go:117] "RemoveContainer" containerID="4c56ffa6cad33de34ec06f2502ca20b3287f4ecc3db23245a0f665a33a3db443"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.752986 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.753002 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.754048 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.783204 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.783786 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-log" containerID="cri-o://9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a" gracePeriod=30
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.784727 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-api" containerID="cri-o://7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c" gracePeriod=30
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.797264 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.802767 4779 scope.go:117] "RemoveContainer" containerID="063c04725d9702400807cf9bc08a7e659dc4aadc81922061b00bf5cae328734d"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.813967 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.827353 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-scripts\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.828153 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.828231 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.828277 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-log-httpd\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.828482 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.828529 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-config-data\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.828552 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n5tm\" (UniqueName: \"kubernetes.io/projected/3074cf44-792f-4339-86f3-cd9dcdcd534f-kube-api-access-9n5tm\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.829509 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-run-httpd\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.931978 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-run-httpd\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932091 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-scripts\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932119 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932151 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932184 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-log-httpd\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932268 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0"
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932299 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-config-data\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932325 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n5tm\" (UniqueName: \"kubernetes.io/projected/3074cf44-792f-4339-86f3-cd9dcdcd534f-kube-api-access-9n5tm\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-run-httpd\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.932974 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-log-httpd\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.937400 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.937488 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.937930 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-config-data\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.938328 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-scripts\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.940209 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:02 crc kubenswrapper[4779]: I0929 09:49:02.951006 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9n5tm\" (UniqueName: \"kubernetes.io/projected/3074cf44-792f-4339-86f3-cd9dcdcd534f-kube-api-access-9n5tm\") pod \"ceilometer-0\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " pod="openstack/ceilometer-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.065167 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.073767 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.238395 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-combined-ca-bundle\") pod \"32f92928-6ba4-4824-94ac-8e20efe26e67\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.238735 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwzwb\" (UniqueName: \"kubernetes.io/projected/32f92928-6ba4-4824-94ac-8e20efe26e67-kube-api-access-fwzwb\") pod \"32f92928-6ba4-4824-94ac-8e20efe26e67\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.238991 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-scripts\") pod \"32f92928-6ba4-4824-94ac-8e20efe26e67\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.239057 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-config-data\") pod \"32f92928-6ba4-4824-94ac-8e20efe26e67\" (UID: \"32f92928-6ba4-4824-94ac-8e20efe26e67\") " Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.254220 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-scripts" (OuterVolumeSpecName: "scripts") pod "32f92928-6ba4-4824-94ac-8e20efe26e67" (UID: "32f92928-6ba4-4824-94ac-8e20efe26e67"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.258465 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32f92928-6ba4-4824-94ac-8e20efe26e67-kube-api-access-fwzwb" (OuterVolumeSpecName: "kube-api-access-fwzwb") pod "32f92928-6ba4-4824-94ac-8e20efe26e67" (UID: "32f92928-6ba4-4824-94ac-8e20efe26e67"). InnerVolumeSpecName "kube-api-access-fwzwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.290617 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32f92928-6ba4-4824-94ac-8e20efe26e67" (UID: "32f92928-6ba4-4824-94ac-8e20efe26e67"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.295759 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-config-data" (OuterVolumeSpecName: "config-data") pod "32f92928-6ba4-4824-94ac-8e20efe26e67" (UID: "32f92928-6ba4-4824-94ac-8e20efe26e67"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.351636 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.351677 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.351689 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f92928-6ba4-4824-94ac-8e20efe26e67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.351700 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwzwb\" (UniqueName: \"kubernetes.io/projected/32f92928-6ba4-4824-94ac-8e20efe26e67-kube-api-access-fwzwb\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:03 crc kubenswrapper[4779]: W0929 09:49:03.521474 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3074cf44_792f_4339_86f3_cd9dcdcd534f.slice/crio-7211ff1809b6efabef80354668837640a4704a5c9b87e44480c9d1e6608d8a4e WatchSource:0}: Error finding container 7211ff1809b6efabef80354668837640a4704a5c9b87e44480c9d1e6608d8a4e: Status 404 returned error can't find the container with id 7211ff1809b6efabef80354668837640a4704a5c9b87e44480c9d1e6608d8a4e Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.528320 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.589544 4779 generic.go:334] "Generic (PLEG): container finished" podID="797a0428-592e-44f8-a1c4-78275dab99c7" containerID="9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a" exitCode=143 Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.589885 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"797a0428-592e-44f8-a1c4-78275dab99c7","Type":"ContainerDied","Data":"9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a"} Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.597836 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" event={"ID":"32f92928-6ba4-4824-94ac-8e20efe26e67","Type":"ContainerDied","Data":"1bb68dd99453afaf4d9d347e16661a17d05c238b71ea19e9287e13c98515127a"} Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.597884 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bb68dd99453afaf4d9d347e16661a17d05c238b71ea19e9287e13c98515127a" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.597981 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6s6xf" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.601166 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerStarted","Data":"7211ff1809b6efabef80354668837640a4704a5c9b87e44480c9d1e6608d8a4e"} Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.602362 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerName="nova-metadata-log" containerID="cri-o://1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330" gracePeriod=30 Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.602727 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7c5de83c-d54a-44bc-8cee-f95541ef5a0a" containerName="nova-scheduler-scheduler" containerID="cri-o://fce0e087c6951450a195fef03c79845bd12864e723ebac5002d1730b5c768669" gracePeriod=30 Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.603161 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerName="nova-metadata-metadata" containerID="cri-o://9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e" gracePeriod=30 Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.657264 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 09:49:03 crc kubenswrapper[4779]: E0929 09:49:03.657759 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32f92928-6ba4-4824-94ac-8e20efe26e67" containerName="nova-cell1-conductor-db-sync" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.657781 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="32f92928-6ba4-4824-94ac-8e20efe26e67" containerName="nova-cell1-conductor-db-sync" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.658090 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="32f92928-6ba4-4824-94ac-8e20efe26e67" containerName="nova-cell1-conductor-db-sync" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.658896 4779 util.go:30] "No sandbox for pod can be found. 
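[annotation] "No sandbox for pod can be found. Need to start a new one" means the kubelet's next sync will issue a CRI RunPodSandbox call to CRI-O before any container can start. A hedged sketch of the shape of that request; the socket path is the usual CRI-O default and the metadata values are taken from the log, but this is an illustration, not the kubelet's actual call site:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func main() {
	// CRI-O's default unix socket; assumed, not read from this node's config.
	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	client := runtimeapi.NewRuntimeServiceClient(conn)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	resp, err := client.RunPodSandbox(ctx, &runtimeapi.RunPodSandboxRequest{
		Config: &runtimeapi.PodSandboxConfig{
			Metadata: &runtimeapi.PodSandboxMetadata{
				Name:      "nova-cell1-conductor-0", // from the SyncLoop ADD above
				Namespace: "openstack",
				Uid:       "9ae237e7-f974-47bd-803d-05af4bb116f1",
			},
		},
	})
	fmt.Println(resp, err)
}
```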
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.664700 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.688277 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.760007 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ae237e7-f974-47bd-803d-05af4bb116f1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.760113 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvtrr\" (UniqueName: \"kubernetes.io/projected/9ae237e7-f974-47bd-803d-05af4bb116f1-kube-api-access-pvtrr\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.760201 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ae237e7-f974-47bd-803d-05af4bb116f1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.863379 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ae237e7-f974-47bd-803d-05af4bb116f1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.863533 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvtrr\" (UniqueName: \"kubernetes.io/projected/9ae237e7-f974-47bd-803d-05af4bb116f1-kube-api-access-pvtrr\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.863570 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ae237e7-f974-47bd-803d-05af4bb116f1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.868122 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ae237e7-f974-47bd-803d-05af4bb116f1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.869292 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ae237e7-f974-47bd-803d-05af4bb116f1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.882959 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvtrr\" (UniqueName: \"kubernetes.io/projected/9ae237e7-f974-47bd-803d-05af4bb116f1-kube-api-access-pvtrr\") pod \"nova-cell1-conductor-0\" (UID: \"9ae237e7-f974-47bd-803d-05af4bb116f1\") " pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:03 crc kubenswrapper[4779]: I0929 09:49:03.998174 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.293156 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.305999 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.343248 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.477297 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-logs\") pod \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.477410 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-config-data\") pod \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.477458 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-nova-metadata-tls-certs\") pod \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.477547 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-combined-ca-bundle\") pod \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.477666 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-logs" (OuterVolumeSpecName: "logs") pod "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" (UID: "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.477705 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrdrm\" (UniqueName: \"kubernetes.io/projected/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-kube-api-access-mrdrm\") pod \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\" (UID: \"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11\") " Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.478134 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.483775 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-kube-api-access-mrdrm" (OuterVolumeSpecName: "kube-api-access-mrdrm") pod "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" (UID: "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11"). InnerVolumeSpecName "kube-api-access-mrdrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.513502 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-config-data" (OuterVolumeSpecName: "config-data") pod "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" (UID: "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.528581 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" (UID: "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.532305 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 09:49:04 crc kubenswrapper[4779]: W0929 09:49:04.538918 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ae237e7_f974_47bd_803d_05af4bb116f1.slice/crio-3c8f5fd0c36d51569124a2c23576b3d3d929f26760ea091f68b2d31391643fec WatchSource:0}: Error finding container 3c8f5fd0c36d51569124a2c23576b3d3d929f26760ea091f68b2d31391643fec: Status 404 returned error can't find the container with id 3c8f5fd0c36d51569124a2c23576b3d3d929f26760ea091f68b2d31391643fec Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.545854 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" (UID: "498f7b6b-1eee-4b9a-b987-2ebdb93d0d11"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.580103 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.580158 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrdrm\" (UniqueName: \"kubernetes.io/projected/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-kube-api-access-mrdrm\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.580178 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.580195 4779 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.617578 4779 generic.go:334] "Generic (PLEG): container finished" podID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerID="9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e" exitCode=0 Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.617890 4779 generic.go:334] "Generic (PLEG): container finished" podID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerID="1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330" exitCode=143 Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.617608 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.617678 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11","Type":"ContainerDied","Data":"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e"} Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.619770 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11","Type":"ContainerDied","Data":"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330"} Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.619792 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"498f7b6b-1eee-4b9a-b987-2ebdb93d0d11","Type":"ContainerDied","Data":"29dd4b2693581af23760e49a941dd43864e434499e913612ac5de8f15b82a20d"} Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.619816 4779 scope.go:117] "RemoveContainer" containerID="9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.625779 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerStarted","Data":"92818d526b2f683814fff80074ee525441a8689ba7c7b4544b40b9be675c1cb6"} Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.625818 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerStarted","Data":"6c85f90fc00feca86a2a212b751e91f2e1eae8f5977c2f66792caac9210521ea"} Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.628122 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"9ae237e7-f974-47bd-803d-05af4bb116f1","Type":"ContainerStarted","Data":"3c8f5fd0c36d51569124a2c23576b3d3d929f26760ea091f68b2d31391643fec"} Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.628161 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.676952 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.701665 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.702776 4779 scope.go:117] "RemoveContainer" containerID="1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.703036 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.743421 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" path="/var/lib/kubelet/pods/498f7b6b-1eee-4b9a-b987-2ebdb93d0d11/volumes" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.744271 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab1c8bfe-eb10-4813-87dc-0df1ea736205" path="/var/lib/kubelet/pods/ab1c8bfe-eb10-4813-87dc-0df1ea736205/volumes" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.745489 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:04 crc 
kubenswrapper[4779]: E0929 09:49:04.745829 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerName="nova-metadata-metadata" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.745844 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerName="nova-metadata-metadata" Sep 29 09:49:04 crc kubenswrapper[4779]: E0929 09:49:04.745878 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerName="nova-metadata-log" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.745889 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerName="nova-metadata-log" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.746644 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerName="nova-metadata-metadata" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.746668 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="498f7b6b-1eee-4b9a-b987-2ebdb93d0d11" containerName="nova-metadata-log" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.747828 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.747943 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.750980 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.754420 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.754629 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.770067 4779 scope.go:117] "RemoveContainer" containerID="9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e" Sep 29 09:49:04 crc kubenswrapper[4779]: E0929 09:49:04.770591 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e\": container with ID starting with 9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e not found: ID does not exist" containerID="9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.770631 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e"} err="failed to get container status \"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e\": rpc error: code = NotFound desc = could not find container \"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e\": container with ID starting with 9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e not found: ID does not exist" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.770660 4779 scope.go:117] "RemoveContainer" containerID="1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330" Sep 29 09:49:04 crc kubenswrapper[4779]: E0929 09:49:04.771845 4779 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330\": container with ID starting with 1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330 not found: ID does not exist" containerID="1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.772004 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330"} err="failed to get container status \"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330\": rpc error: code = NotFound desc = could not find container \"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330\": container with ID starting with 1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330 not found: ID does not exist" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.772038 4779 scope.go:117] "RemoveContainer" containerID="9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.774222 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e"} err="failed to get container status \"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e\": rpc error: code = NotFound desc = could not find container \"9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e\": container with ID starting with 9c747c3f8da7f8f9ecc0286d18c7958f90b976b20d3416af8c5fd47a134e114e not found: ID does not exist" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.774256 4779 scope.go:117] "RemoveContainer" containerID="1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.774601 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330"} err="failed to get container status \"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330\": rpc error: code = NotFound desc = could not find container \"1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330\": container with ID starting with 1f046b998505a2559f3fdc535efea622ba972734d46f0e7feb526ee3f20e9330 not found: ID does not exist" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.886310 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5xft\" (UniqueName: \"kubernetes.io/projected/41eb7f46-6e65-479d-bca0-29471323c4d7-kube-api-access-f5xft\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.886422 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41eb7f46-6e65-479d-bca0-29471323c4d7-logs\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.886466 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-nova-metadata-tls-certs\") pod 
\"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.886493 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.886538 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-config-data\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.988059 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5xft\" (UniqueName: \"kubernetes.io/projected/41eb7f46-6e65-479d-bca0-29471323c4d7-kube-api-access-f5xft\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.988233 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41eb7f46-6e65-479d-bca0-29471323c4d7-logs\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.988739 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41eb7f46-6e65-479d-bca0-29471323c4d7-logs\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.988826 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.989668 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.989786 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-config-data\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.993548 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.994513 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:04 crc kubenswrapper[4779]: I0929 09:49:04.996325 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-config-data\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:05 crc kubenswrapper[4779]: I0929 09:49:05.005519 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5xft\" (UniqueName: \"kubernetes.io/projected/41eb7f46-6e65-479d-bca0-29471323c4d7-kube-api-access-f5xft\") pod \"nova-metadata-0\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " pod="openstack/nova-metadata-0" Sep 29 09:49:05 crc kubenswrapper[4779]: I0929 09:49:05.081361 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:49:05 crc kubenswrapper[4779]: I0929 09:49:05.607250 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:05 crc kubenswrapper[4779]: I0929 09:49:05.647126 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerStarted","Data":"96578404e3dcee2ccb503f6677aec18c57ae49cfcd981a91840ecbfef7fa8c86"} Sep 29 09:49:05 crc kubenswrapper[4779]: I0929 09:49:05.649019 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"9ae237e7-f974-47bd-803d-05af4bb116f1","Type":"ContainerStarted","Data":"c0df73cd89f3bb8e446489951a7052b70b6529ca75ae8d2de77ba1c1f1713919"} Sep 29 09:49:05 crc kubenswrapper[4779]: I0929 09:49:05.650243 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 29 09:49:05 crc kubenswrapper[4779]: I0929 09:49:05.653460 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41eb7f46-6e65-479d-bca0-29471323c4d7","Type":"ContainerStarted","Data":"da2f1011f73a159b4323d5c2b87c749a642d6a13c1c8744890b61d856b33c307"} Sep 29 09:49:05 crc kubenswrapper[4779]: I0929 09:49:05.672344 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.67232834 podStartE2EDuration="2.67232834s" podCreationTimestamp="2025-09-29 09:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:05.664786485 +0000 UTC m=+1177.646110389" watchObservedRunningTime="2025-09-29 09:49:05.67232834 +0000 UTC m=+1177.653652244" Sep 29 09:49:05 crc kubenswrapper[4779]: E0929 09:49:05.976703 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fce0e087c6951450a195fef03c79845bd12864e723ebac5002d1730b5c768669" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 09:49:05 crc kubenswrapper[4779]: E0929 09:49:05.978462 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="fce0e087c6951450a195fef03c79845bd12864e723ebac5002d1730b5c768669" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 09:49:05 crc kubenswrapper[4779]: E0929 09:49:05.979973 4779 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fce0e087c6951450a195fef03c79845bd12864e723ebac5002d1730b5c768669" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 09:49:05 crc kubenswrapper[4779]: E0929 09:49:05.980021 4779 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="7c5de83c-d54a-44bc-8cee-f95541ef5a0a" containerName="nova-scheduler-scheduler" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.383099 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.518585 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-combined-ca-bundle\") pod \"797a0428-592e-44f8-a1c4-78275dab99c7\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.518637 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797a0428-592e-44f8-a1c4-78275dab99c7-logs\") pod \"797a0428-592e-44f8-a1c4-78275dab99c7\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.518702 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-config-data\") pod \"797a0428-592e-44f8-a1c4-78275dab99c7\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.518740 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fghgs\" (UniqueName: \"kubernetes.io/projected/797a0428-592e-44f8-a1c4-78275dab99c7-kube-api-access-fghgs\") pod \"797a0428-592e-44f8-a1c4-78275dab99c7\" (UID: \"797a0428-592e-44f8-a1c4-78275dab99c7\") " Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.519811 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/797a0428-592e-44f8-a1c4-78275dab99c7-logs" (OuterVolumeSpecName: "logs") pod "797a0428-592e-44f8-a1c4-78275dab99c7" (UID: "797a0428-592e-44f8-a1c4-78275dab99c7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.525349 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/797a0428-592e-44f8-a1c4-78275dab99c7-kube-api-access-fghgs" (OuterVolumeSpecName: "kube-api-access-fghgs") pod "797a0428-592e-44f8-a1c4-78275dab99c7" (UID: "797a0428-592e-44f8-a1c4-78275dab99c7"). InnerVolumeSpecName "kube-api-access-fghgs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.552973 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-config-data" (OuterVolumeSpecName: "config-data") pod "797a0428-592e-44f8-a1c4-78275dab99c7" (UID: "797a0428-592e-44f8-a1c4-78275dab99c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.561409 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "797a0428-592e-44f8-a1c4-78275dab99c7" (UID: "797a0428-592e-44f8-a1c4-78275dab99c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.621445 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/797a0428-592e-44f8-a1c4-78275dab99c7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.621477 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.621487 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fghgs\" (UniqueName: \"kubernetes.io/projected/797a0428-592e-44f8-a1c4-78275dab99c7-kube-api-access-fghgs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.621497 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797a0428-592e-44f8-a1c4-78275dab99c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.667611 4779 generic.go:334] "Generic (PLEG): container finished" podID="7c5de83c-d54a-44bc-8cee-f95541ef5a0a" containerID="fce0e087c6951450a195fef03c79845bd12864e723ebac5002d1730b5c768669" exitCode=0 Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.667732 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7c5de83c-d54a-44bc-8cee-f95541ef5a0a","Type":"ContainerDied","Data":"fce0e087c6951450a195fef03c79845bd12864e723ebac5002d1730b5c768669"} Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.671310 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerStarted","Data":"71976e87f0d11a9c4a93ab2ecd5ebd26fb9b5efb0c7db1a184af6ef810ca4d34"} Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.672064 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.674675 4779 generic.go:334] "Generic (PLEG): container finished" podID="797a0428-592e-44f8-a1c4-78275dab99c7" containerID="7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c" exitCode=0 Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.674793 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"797a0428-592e-44f8-a1c4-78275dab99c7","Type":"ContainerDied","Data":"7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c"} Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.674829 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"797a0428-592e-44f8-a1c4-78275dab99c7","Type":"ContainerDied","Data":"55c638b952b4a27fbe233b1112f432943608a612c46fe616f8626be1456fb71b"} Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.674854 4779 scope.go:117] "RemoveContainer" containerID="7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.675054 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.687891 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41eb7f46-6e65-479d-bca0-29471323c4d7","Type":"ContainerStarted","Data":"edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82"} Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.687999 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41eb7f46-6e65-479d-bca0-29471323c4d7","Type":"ContainerStarted","Data":"b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9"} Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.688103 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine" containerID="cri-o://3eb7e06311b67b9128c6c006dc1d7ed6d1f20a00ed64c27b5f97404d4ecbb462" gracePeriod=30 Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.703791 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.330519889 podStartE2EDuration="4.70376569s" podCreationTimestamp="2025-09-29 09:49:02 +0000 UTC" firstStartedPulling="2025-09-29 09:49:03.525289681 +0000 UTC m=+1175.506613585" lastFinishedPulling="2025-09-29 09:49:05.898535462 +0000 UTC m=+1177.879859386" observedRunningTime="2025-09-29 09:49:06.694374412 +0000 UTC m=+1178.675698366" watchObservedRunningTime="2025-09-29 09:49:06.70376569 +0000 UTC m=+1178.685089604" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.715564 4779 scope.go:117] "RemoveContainer" containerID="9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.728758 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.7287333350000003 podStartE2EDuration="2.728733335s" podCreationTimestamp="2025-09-29 09:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:06.720388666 +0000 UTC m=+1178.701712570" watchObservedRunningTime="2025-09-29 09:49:06.728733335 +0000 UTC m=+1178.710057249" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.751325 4779 scope.go:117] "RemoveContainer" containerID="7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c" Sep 29 09:49:06 crc kubenswrapper[4779]: E0929 09:49:06.751880 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c\": 
container with ID starting with 7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c not found: ID does not exist" containerID="7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.751925 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c"} err="failed to get container status \"7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c\": rpc error: code = NotFound desc = could not find container \"7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c\": container with ID starting with 7228de1311977a530ea2451e7e6ff647d19cc8ec9779253fb1e1b55f451df11c not found: ID does not exist" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.751948 4779 scope.go:117] "RemoveContainer" containerID="9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a" Sep 29 09:49:06 crc kubenswrapper[4779]: E0929 09:49:06.752302 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a\": container with ID starting with 9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a not found: ID does not exist" containerID="9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.752350 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a"} err="failed to get container status \"9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a\": rpc error: code = NotFound desc = could not find container \"9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a\": container with ID starting with 9be430b40b49986839bf6f6725ac2cbfffecfc00f565076bdea037ca9648e19a not found: ID does not exist" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.754690 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.773399 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.788050 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:06 crc kubenswrapper[4779]: E0929 09:49:06.788554 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-api" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.788573 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-api" Sep 29 09:49:06 crc kubenswrapper[4779]: E0929 09:49:06.788594 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-log" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.788603 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-log" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.788830 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-log" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.788849 4779 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" containerName="nova-api-api" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.790140 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.795450 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.795571 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.928884 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjcrg\" (UniqueName: \"kubernetes.io/projected/b0d24523-ec54-4af5-90ce-b9c9188f92d9-kube-api-access-hjcrg\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.928961 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-config-data\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.929683 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:06 crc kubenswrapper[4779]: I0929 09:49:06.929988 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0d24523-ec54-4af5-90ce-b9c9188f92d9-logs\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.031993 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-config-data\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.032062 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.032162 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0d24523-ec54-4af5-90ce-b9c9188f92d9-logs\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.032207 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjcrg\" (UniqueName: \"kubernetes.io/projected/b0d24523-ec54-4af5-90ce-b9c9188f92d9-kube-api-access-hjcrg\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.033195 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0d24523-ec54-4af5-90ce-b9c9188f92d9-logs\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.039628 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-config-data\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.039633 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.051838 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjcrg\" (UniqueName: \"kubernetes.io/projected/b0d24523-ec54-4af5-90ce-b9c9188f92d9-kube-api-access-hjcrg\") pod \"nova-api-0\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") " pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.111978 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.236398 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.338749 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-combined-ca-bundle\") pod \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.338822 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-config-data\") pod \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.338978 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs264\" (UniqueName: \"kubernetes.io/projected/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-kube-api-access-bs264\") pod \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\" (UID: \"7c5de83c-d54a-44bc-8cee-f95541ef5a0a\") " Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.343703 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-kube-api-access-bs264" (OuterVolumeSpecName: "kube-api-access-bs264") pod "7c5de83c-d54a-44bc-8cee-f95541ef5a0a" (UID: "7c5de83c-d54a-44bc-8cee-f95541ef5a0a"). InnerVolumeSpecName "kube-api-access-bs264". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.369123 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c5de83c-d54a-44bc-8cee-f95541ef5a0a" (UID: "7c5de83c-d54a-44bc-8cee-f95541ef5a0a"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.378060 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-config-data" (OuterVolumeSpecName: "config-data") pod "7c5de83c-d54a-44bc-8cee-f95541ef5a0a" (UID: "7c5de83c-d54a-44bc-8cee-f95541ef5a0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.441091 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs264\" (UniqueName: \"kubernetes.io/projected/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-kube-api-access-bs264\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.441119 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.441128 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c5de83c-d54a-44bc-8cee-f95541ef5a0a-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.642739 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.731895 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b0d24523-ec54-4af5-90ce-b9c9188f92d9","Type":"ContainerStarted","Data":"0cbceda1324728306f5da5c63b39fcdf25cfa633a67bc0e4f4a6456f452fed3b"} Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.747138 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.749797 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7c5de83c-d54a-44bc-8cee-f95541ef5a0a","Type":"ContainerDied","Data":"68d4f15559d198864f90db775443eb89fed8be2b3aa54904aac67eaf93443a1d"}
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.749840 4779 scope.go:117] "RemoveContainer" containerID="fce0e087c6951450a195fef03c79845bd12864e723ebac5002d1730b5c768669"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.807709 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.825393 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.855710 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 09:49:07 crc kubenswrapper[4779]: E0929 09:49:07.856528 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c5de83c-d54a-44bc-8cee-f95541ef5a0a" containerName="nova-scheduler-scheduler"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.856548 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c5de83c-d54a-44bc-8cee-f95541ef5a0a" containerName="nova-scheduler-scheduler"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.862194 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c5de83c-d54a-44bc-8cee-f95541ef5a0a" containerName="nova-scheduler-scheduler"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.864154 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.868888 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.869290 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.962365 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-config-data\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.962569 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwdnd\" (UniqueName: \"kubernetes.io/projected/d517750c-87ac-4c0a-b80e-705b03317ba0-kube-api-access-nwdnd\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:07 crc kubenswrapper[4779]: I0929 09:49:07.962626 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:07 crc kubenswrapper[4779]: E0929 09:49:07.964417 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c5de83c_d54a_44bc_8cee_f95541ef5a0a.slice\": RecentStats: unable to find data in memory cache]"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.064973 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-config-data\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.065425 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwdnd\" (UniqueName: \"kubernetes.io/projected/d517750c-87ac-4c0a-b80e-705b03317ba0-kube-api-access-nwdnd\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.065511 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.069747 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-config-data\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.074335 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.086524 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwdnd\" (UniqueName: \"kubernetes.io/projected/d517750c-87ac-4c0a-b80e-705b03317ba0-kube-api-access-nwdnd\") pod \"nova-scheduler-0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " pod="openstack/nova-scheduler-0"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.192209 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.682872 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 09:49:08 crc kubenswrapper[4779]: W0929 09:49:08.686630 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd517750c_87ac_4c0a_b80e_705b03317ba0.slice/crio-e0e9cc5cd7c3ad38ea8821f34fa2e2ead7bf985ea42b35df3bdfa09340aaf0f3 WatchSource:0}: Error finding container e0e9cc5cd7c3ad38ea8821f34fa2e2ead7bf985ea42b35df3bdfa09340aaf0f3: Status 404 returned error can't find the container with id e0e9cc5cd7c3ad38ea8821f34fa2e2ead7bf985ea42b35df3bdfa09340aaf0f3
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.739728 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="797a0428-592e-44f8-a1c4-78275dab99c7" path="/var/lib/kubelet/pods/797a0428-592e-44f8-a1c4-78275dab99c7/volumes"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.740569 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c5de83c-d54a-44bc-8cee-f95541ef5a0a" path="/var/lib/kubelet/pods/7c5de83c-d54a-44bc-8cee-f95541ef5a0a/volumes"
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.761044 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d517750c-87ac-4c0a-b80e-705b03317ba0","Type":"ContainerStarted","Data":"e0e9cc5cd7c3ad38ea8821f34fa2e2ead7bf985ea42b35df3bdfa09340aaf0f3"}
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.763666 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b0d24523-ec54-4af5-90ce-b9c9188f92d9","Type":"ContainerStarted","Data":"cff9016bd7da203083c8e2815ea78c219fc3d726c9ea78a3acd4d696dd50030e"}
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.763721 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b0d24523-ec54-4af5-90ce-b9c9188f92d9","Type":"ContainerStarted","Data":"28f892ab57952c3fdc8c7789725e2f68fb97fd8a089822304b62069e3624d268"}
Sep 29 09:49:08 crc kubenswrapper[4779]: I0929 09:49:08.788507 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.788491547 podStartE2EDuration="2.788491547s" podCreationTimestamp="2025-09-29 09:49:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:08.788181528 +0000 UTC m=+1180.769505452" watchObservedRunningTime="2025-09-29 09:49:08.788491547 +0000 UTC m=+1180.769815451"
Sep 29 09:49:09 crc kubenswrapper[4779]: I0929 09:49:09.779465 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d517750c-87ac-4c0a-b80e-705b03317ba0","Type":"ContainerStarted","Data":"45590e4cbae05a9131d9d7c26aece6f8431c36aa4e7b7accd758ad519c06cd6b"}
Sep 29 09:49:09 crc kubenswrapper[4779]: I0929 09:49:09.805067 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.805051531 podStartE2EDuration="2.805051531s" podCreationTimestamp="2025-09-29 09:49:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:09.801262613 +0000 UTC m=+1181.782586537" watchObservedRunningTime="2025-09-29 09:49:09.805051531 +0000 UTC m=+1181.786375435"
Sep 29 09:49:09 crc kubenswrapper[4779]: I0929 09:49:09.930847 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Sep 29 09:49:10 crc kubenswrapper[4779]: I0929 09:49:10.082982 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 09:49:10 crc kubenswrapper[4779]: I0929 09:49:10.083035 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 09:49:12 crc kubenswrapper[4779]: I0929 09:49:12.823544 4779 generic.go:334] "Generic (PLEG): container finished" podID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerID="3eb7e06311b67b9128c6c006dc1d7ed6d1f20a00ed64c27b5f97404d4ecbb462" exitCode=0
Sep 29 09:49:12 crc kubenswrapper[4779]: I0929 09:49:12.823589 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerDied","Data":"3eb7e06311b67b9128c6c006dc1d7ed6d1f20a00ed64c27b5f97404d4ecbb462"}
Sep 29 09:49:12 crc kubenswrapper[4779]: I0929 09:49:12.824155 4779 scope.go:117] "RemoveContainer" containerID="04ec2cc73b2c81ba077c7fc00e233e5df22795dfb270435052946778c9421ed3"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.193604 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.270562 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.376421 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-config-data\") pod \"f67e636b-969b-48ee-bbec-3d8b38b22274\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") "
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.376554 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-combined-ca-bundle\") pod \"f67e636b-969b-48ee-bbec-3d8b38b22274\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") "
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.376601 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-custom-prometheus-ca\") pod \"f67e636b-969b-48ee-bbec-3d8b38b22274\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") "
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.376637 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbdmx\" (UniqueName: \"kubernetes.io/projected/f67e636b-969b-48ee-bbec-3d8b38b22274-kube-api-access-jbdmx\") pod \"f67e636b-969b-48ee-bbec-3d8b38b22274\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") "
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.376689 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67e636b-969b-48ee-bbec-3d8b38b22274-logs\") pod \"f67e636b-969b-48ee-bbec-3d8b38b22274\" (UID: \"f67e636b-969b-48ee-bbec-3d8b38b22274\") "
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.377462 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f67e636b-969b-48ee-bbec-3d8b38b22274-logs" (OuterVolumeSpecName: "logs") pod "f67e636b-969b-48ee-bbec-3d8b38b22274" (UID: "f67e636b-969b-48ee-bbec-3d8b38b22274"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.391112 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f67e636b-969b-48ee-bbec-3d8b38b22274-kube-api-access-jbdmx" (OuterVolumeSpecName: "kube-api-access-jbdmx") pod "f67e636b-969b-48ee-bbec-3d8b38b22274" (UID: "f67e636b-969b-48ee-bbec-3d8b38b22274"). InnerVolumeSpecName "kube-api-access-jbdmx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.408068 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "f67e636b-969b-48ee-bbec-3d8b38b22274" (UID: "f67e636b-969b-48ee-bbec-3d8b38b22274"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.409600 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f67e636b-969b-48ee-bbec-3d8b38b22274" (UID: "f67e636b-969b-48ee-bbec-3d8b38b22274"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.427868 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-config-data" (OuterVolumeSpecName: "config-data") pod "f67e636b-969b-48ee-bbec-3d8b38b22274" (UID: "f67e636b-969b-48ee-bbec-3d8b38b22274"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.479021 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.479057 4779 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-custom-prometheus-ca\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.479068 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbdmx\" (UniqueName: \"kubernetes.io/projected/f67e636b-969b-48ee-bbec-3d8b38b22274-kube-api-access-jbdmx\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.479077 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67e636b-969b-48ee-bbec-3d8b38b22274-logs\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.479087 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67e636b-969b-48ee-bbec-3d8b38b22274-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.837849 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67e636b-969b-48ee-bbec-3d8b38b22274","Type":"ContainerDied","Data":"57d94e80b743a707414e4e07a75c1ca84c2998e24c168ff843f3ec6827c4fe90"}
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.837888 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.837948 4779 scope.go:117] "RemoveContainer" containerID="3eb7e06311b67b9128c6c006dc1d7ed6d1f20a00ed64c27b5f97404d4ecbb462"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.906366 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"]
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.913072 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"]
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.929249 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"]
Sep 29 09:49:13 crc kubenswrapper[4779]: E0929 09:49:13.929727 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.929755 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: E0929 09:49:13.929773 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.929782 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: E0929 09:49:13.929805 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.929813 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: E0929 09:49:13.929827 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.929834 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.930079 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.930104 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.930114 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.931082 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.934313 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data"
Sep 29 09:49:13 crc kubenswrapper[4779]: I0929 09:49:13.938705 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.023236 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.090248 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.090318 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-config-data\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.090410 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.090502 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kswnq\" (UniqueName: \"kubernetes.io/projected/8b8a6691-6feb-4a11-acd4-0e02f5454a47-kube-api-access-kswnq\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.090590 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b8a6691-6feb-4a11-acd4-0e02f5454a47-logs\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.192448 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-config-data\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.192515 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.192618 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kswnq\" (UniqueName: \"kubernetes.io/projected/8b8a6691-6feb-4a11-acd4-0e02f5454a47-kube-api-access-kswnq\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.192664 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b8a6691-6feb-4a11-acd4-0e02f5454a47-logs\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.192763 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.193670 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b8a6691-6feb-4a11-acd4-0e02f5454a47-logs\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.198185 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.198279 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.198545 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8a6691-6feb-4a11-acd4-0e02f5454a47-config-data\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.208499 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kswnq\" (UniqueName: \"kubernetes.io/projected/8b8a6691-6feb-4a11-acd4-0e02f5454a47-kube-api-access-kswnq\") pod \"watcher-decision-engine-0\" (UID: \"8b8a6691-6feb-4a11-acd4-0e02f5454a47\") " pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.256570 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.737482 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" path="/var/lib/kubelet/pods/f67e636b-969b-48ee-bbec-3d8b38b22274/volumes"
Sep 29 09:49:14 crc kubenswrapper[4779]: W0929 09:49:14.764852 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b8a6691_6feb_4a11_acd4_0e02f5454a47.slice/crio-e6b6f97e2a54dabbd96e727b18c79b686e88143434c6839bd178248575b5db4f WatchSource:0}: Error finding container e6b6f97e2a54dabbd96e727b18c79b686e88143434c6839bd178248575b5db4f: Status 404 returned error can't find the container with id e6b6f97e2a54dabbd96e727b18c79b686e88143434c6839bd178248575b5db4f
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.767264 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Sep 29 09:49:14 crc kubenswrapper[4779]: I0929 09:49:14.846571 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"8b8a6691-6feb-4a11-acd4-0e02f5454a47","Type":"ContainerStarted","Data":"e6b6f97e2a54dabbd96e727b18c79b686e88143434c6839bd178248575b5db4f"}
Sep 29 09:49:15 crc kubenswrapper[4779]: I0929 09:49:15.083129 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 29 09:49:15 crc kubenswrapper[4779]: I0929 09:49:15.083472 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 29 09:49:15 crc kubenswrapper[4779]: I0929 09:49:15.859887 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"8b8a6691-6feb-4a11-acd4-0e02f5454a47","Type":"ContainerStarted","Data":"70198f9becf67dd1314b9454a37a04c63dcf961a93903a7707de79dd608b2c48"}
Sep 29 09:49:16 crc kubenswrapper[4779]: I0929 09:49:16.103209 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 09:49:16 crc kubenswrapper[4779]: I0929 09:49:16.103254 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 09:49:17 crc kubenswrapper[4779]: I0929 09:49:17.115192 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 09:49:17 crc kubenswrapper[4779]: I0929 09:49:17.115605 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 09:49:18 crc kubenswrapper[4779]: I0929 09:49:18.156157 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 09:49:18 crc kubenswrapper[4779]: I0929 09:49:18.193257 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 29 09:49:18 crc kubenswrapper[4779]: I0929 09:49:18.197075 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 09:49:18 crc kubenswrapper[4779]: I0929 09:49:18.226787 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 29 09:49:18 crc kubenswrapper[4779]: I0929 09:49:18.248191 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=5.248175129 podStartE2EDuration="5.248175129s" podCreationTimestamp="2025-09-29 09:49:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:15.884636526 +0000 UTC m=+1187.865960430" watchObservedRunningTime="2025-09-29 09:49:18.248175129 +0000 UTC m=+1190.229499033"
Sep 29 09:49:18 crc kubenswrapper[4779]: I0929 09:49:18.919560 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 29 09:49:24 crc kubenswrapper[4779]: I0929 09:49:24.257592 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:24 crc kubenswrapper[4779]: I0929 09:49:24.282021 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:24 crc kubenswrapper[4779]: I0929 09:49:24.958708 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:24 crc kubenswrapper[4779]: I0929 09:49:24.989925 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0"
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.089016 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.090356 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.094757 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.847819 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.971654 4779 generic.go:334] "Generic (PLEG): container finished" podID="39e1d56f-d495-4b29-b2c7-409bf0e79550" containerID="ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369" exitCode=137
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.971708 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"39e1d56f-d495-4b29-b2c7-409bf0e79550","Type":"ContainerDied","Data":"ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369"}
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.971757 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"39e1d56f-d495-4b29-b2c7-409bf0e79550","Type":"ContainerDied","Data":"d329346600d7b234112f24b8800d4986502a2755e03278255ce5edf349d7089c"}
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.971778 4779 scope.go:117] "RemoveContainer" containerID="ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369"
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.973015 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:25 crc kubenswrapper[4779]: I0929 09:49:25.980046 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.003922 4779 scope.go:117] "RemoveContainer" containerID="ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369"
Sep 29 09:49:26 crc kubenswrapper[4779]: E0929 09:49:26.014290 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369\": container with ID starting with ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369 not found: ID does not exist" containerID="ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.014343 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369"} err="failed to get container status \"ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369\": rpc error: code = NotFound desc = could not find container \"ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369\": container with ID starting with ca5de4dbbba4b79379498b14547fa6477f634ef182151e235943fcdc9dfec369 not found: ID does not exist"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.025088 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b2sr\" (UniqueName: \"kubernetes.io/projected/39e1d56f-d495-4b29-b2c7-409bf0e79550-kube-api-access-6b2sr\") pod \"39e1d56f-d495-4b29-b2c7-409bf0e79550\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") "
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.025162 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-combined-ca-bundle\") pod \"39e1d56f-d495-4b29-b2c7-409bf0e79550\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") "
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.025206 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-config-data\") pod \"39e1d56f-d495-4b29-b2c7-409bf0e79550\" (UID: \"39e1d56f-d495-4b29-b2c7-409bf0e79550\") "
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.040731 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39e1d56f-d495-4b29-b2c7-409bf0e79550-kube-api-access-6b2sr" (OuterVolumeSpecName: "kube-api-access-6b2sr") pod "39e1d56f-d495-4b29-b2c7-409bf0e79550" (UID: "39e1d56f-d495-4b29-b2c7-409bf0e79550"). InnerVolumeSpecName "kube-api-access-6b2sr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.054114 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39e1d56f-d495-4b29-b2c7-409bf0e79550" (UID: "39e1d56f-d495-4b29-b2c7-409bf0e79550"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.081155 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-config-data" (OuterVolumeSpecName: "config-data") pod "39e1d56f-d495-4b29-b2c7-409bf0e79550" (UID: "39e1d56f-d495-4b29-b2c7-409bf0e79550"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.129812 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b2sr\" (UniqueName: \"kubernetes.io/projected/39e1d56f-d495-4b29-b2c7-409bf0e79550-kube-api-access-6b2sr\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.129849 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.129862 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39e1d56f-d495-4b29-b2c7-409bf0e79550-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.321848 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.334825 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.350550 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 09:49:26 crc kubenswrapper[4779]: E0929 09:49:26.351199 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.351232 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:26 crc kubenswrapper[4779]: E0929 09:49:26.351280 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39e1d56f-d495-4b29-b2c7-409bf0e79550" containerName="nova-cell1-novncproxy-novncproxy"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.351294 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="39e1d56f-d495-4b29-b2c7-409bf0e79550" containerName="nova-cell1-novncproxy-novncproxy"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.351642 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="39e1d56f-d495-4b29-b2c7-409bf0e79550" containerName="nova-cell1-novncproxy-novncproxy"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.351684 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.351702 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f67e636b-969b-48ee-bbec-3d8b38b22274" containerName="watcher-decision-engine"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.352833 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.355947 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.356343 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.358743 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.377896 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.536110 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.536287 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hst2b\" (UniqueName: \"kubernetes.io/projected/769b375a-f6f8-4343-897c-622d8a7306d0-kube-api-access-hst2b\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.536325 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.536384 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.536413 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.638502 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.638544 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.638572 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.638681 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hst2b\" (UniqueName: \"kubernetes.io/projected/769b375a-f6f8-4343-897c-622d8a7306d0-kube-api-access-hst2b\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.638707 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.642611 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.642615 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.643604 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.653255 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769b375a-f6f8-4343-897c-622d8a7306d0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.658187 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hst2b\" (UniqueName: \"kubernetes.io/projected/769b375a-f6f8-4343-897c-622d8a7306d0-kube-api-access-hst2b\") pod \"nova-cell1-novncproxy-0\" (UID: \"769b375a-f6f8-4343-897c-622d8a7306d0\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.671558 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:26 crc kubenswrapper[4779]: I0929 09:49:26.725108 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39e1d56f-d495-4b29-b2c7-409bf0e79550" path="/var/lib/kubelet/pods/39e1d56f-d495-4b29-b2c7-409bf0e79550/volumes"
Sep 29 09:49:27 crc kubenswrapper[4779]: I0929 09:49:27.146236 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 29 09:49:27 crc kubenswrapper[4779]: I0929 09:49:27.146961 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 29 09:49:27 crc kubenswrapper[4779]: I0929 09:49:27.150325 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 29 09:49:27 crc kubenswrapper[4779]: I0929 09:49:27.155713 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 29 09:49:27 crc kubenswrapper[4779]: I0929 09:49:27.202533 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 29 09:49:27 crc kubenswrapper[4779]: I0929 09:49:27.992604 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"769b375a-f6f8-4343-897c-622d8a7306d0","Type":"ContainerStarted","Data":"b414cbd0db4bb6c6bb789fd2baf167ed3dd7e14b1f4089d169320ba3fda53b4b"}
Sep 29 09:49:27 crc kubenswrapper[4779]: I0929 09:49:27.992991 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 29 09:49:27 crc kubenswrapper[4779]: I0929 09:49:27.993009 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"769b375a-f6f8-4343-897c-622d8a7306d0","Type":"ContainerStarted","Data":"466b520350d3817b31072c7ee5a6678ffc5172e5bc975529fd8bf3a4cb3cb0df"}
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.005299 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.021395 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.02137493 podStartE2EDuration="2.02137493s" podCreationTimestamp="2025-09-29 09:49:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:28.008684677 +0000 UTC m=+1199.990008591" watchObservedRunningTime="2025-09-29 09:49:28.02137493 +0000 UTC m=+1200.002698834"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.172491 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ddb8fc477-92j54"]
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.174016 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.196296 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ddb8fc477-92j54"]
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.273540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-dns-svc\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.273591 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-config\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.273611 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-sb\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.273656 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7dd4\" (UniqueName: \"kubernetes.io/projected/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-kube-api-access-g7dd4\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.273700 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.374784 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.375004 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-dns-svc\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.375038 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-config\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.375056 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-sb\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.375099 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7dd4\" (UniqueName: \"kubernetes.io/projected/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-kube-api-access-g7dd4\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.376027 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-dns-svc\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.376033 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-sb\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.376222 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.376640 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-config\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.391519 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7dd4\" (UniqueName: \"kubernetes.io/projected/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-kube-api-access-g7dd4\") pod \"dnsmasq-dns-6ddb8fc477-92j54\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") " pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.512243 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:28 crc kubenswrapper[4779]: I0929 09:49:28.987570 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ddb8fc477-92j54"]
Sep 29 09:49:28 crc kubenswrapper[4779]: W0929 09:49:28.991938 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaab89cc0_c414_4e7f_8ca9_6a07e6b232e8.slice/crio-d2dcf685b74f5f706a258dbeaf1b8630013ba2db333fbb4407e408ca0fc83563 WatchSource:0}: Error finding container d2dcf685b74f5f706a258dbeaf1b8630013ba2db333fbb4407e408ca0fc83563: Status 404 returned error can't find the container with id d2dcf685b74f5f706a258dbeaf1b8630013ba2db333fbb4407e408ca0fc83563
Sep 29 09:49:29 crc kubenswrapper[4779]: I0929 09:49:29.008530 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54" event={"ID":"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8","Type":"ContainerStarted","Data":"d2dcf685b74f5f706a258dbeaf1b8630013ba2db333fbb4407e408ca0fc83563"}
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.019588 4779 generic.go:334] "Generic (PLEG): container finished" podID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" containerID="71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477" exitCode=0
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.021119 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54" event={"ID":"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8","Type":"ContainerDied","Data":"71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477"}
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.556319 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.556896 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="ceilometer-central-agent" containerID="cri-o://6c85f90fc00feca86a2a212b751e91f2e1eae8f5977c2f66792caac9210521ea" gracePeriod=30
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.557633 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="proxy-httpd" containerID="cri-o://71976e87f0d11a9c4a93ab2ecd5ebd26fb9b5efb0c7db1a184af6ef810ca4d34" gracePeriod=30
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.557687 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="sg-core" containerID="cri-o://96578404e3dcee2ccb503f6677aec18c57ae49cfcd981a91840ecbfef7fa8c86" gracePeriod=30
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.557718 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="ceilometer-notification-agent" containerID="cri-o://92818d526b2f683814fff80074ee525441a8689ba7c7b4544b40b9be675c1cb6" gracePeriod=30
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.567167 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.196:3000/\": read tcp 10.217.0.2:33528->10.217.0.196:3000: read: connection reset by peer"
Sep 29 09:49:30 crc kubenswrapper[4779]: I0929 09:49:30.605519 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.050472 4779 generic.go:334] "Generic (PLEG): container finished" podID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerID="71976e87f0d11a9c4a93ab2ecd5ebd26fb9b5efb0c7db1a184af6ef810ca4d34" exitCode=0
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.050517 4779 generic.go:334] "Generic (PLEG): container finished" podID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerID="96578404e3dcee2ccb503f6677aec18c57ae49cfcd981a91840ecbfef7fa8c86" exitCode=2
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.050528 4779 generic.go:334] "Generic (PLEG): container finished" podID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerID="6c85f90fc00feca86a2a212b751e91f2e1eae8f5977c2f66792caac9210521ea" exitCode=0
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.050637 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerDied","Data":"71976e87f0d11a9c4a93ab2ecd5ebd26fb9b5efb0c7db1a184af6ef810ca4d34"}
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.050674 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerDied","Data":"96578404e3dcee2ccb503f6677aec18c57ae49cfcd981a91840ecbfef7fa8c86"}
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.051061 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerDied","Data":"6c85f90fc00feca86a2a212b751e91f2e1eae8f5977c2f66792caac9210521ea"}
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.054636 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54" event={"ID":"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8","Type":"ContainerStarted","Data":"a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89"}
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.055409 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-log" containerID="cri-o://28f892ab57952c3fdc8c7789725e2f68fb97fd8a089822304b62069e3624d268" gracePeriod=30
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.055702 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-api" containerID="cri-o://cff9016bd7da203083c8e2815ea78c219fc3d726c9ea78a3acd4d696dd50030e" gracePeriod=30
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.091994 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54" podStartSLOduration=3.091975293 podStartE2EDuration="3.091975293s" podCreationTimestamp="2025-09-29 09:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:31.078700374 +0000 UTC m=+1203.060024278" watchObservedRunningTime="2025-09-29 09:49:31.091975293 +0000 UTC m=+1203.073299197"
Sep 29 09:49:31 crc kubenswrapper[4779]: I0929 09:49:31.672349 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.064690 4779 generic.go:334] "Generic (PLEG): container finished" podID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerID="cff9016bd7da203083c8e2815ea78c219fc3d726c9ea78a3acd4d696dd50030e" exitCode=0
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.064717 4779 generic.go:334] "Generic (PLEG): container finished" podID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerID="28f892ab57952c3fdc8c7789725e2f68fb97fd8a089822304b62069e3624d268" exitCode=143
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.065597 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b0d24523-ec54-4af5-90ce-b9c9188f92d9","Type":"ContainerDied","Data":"cff9016bd7da203083c8e2815ea78c219fc3d726c9ea78a3acd4d696dd50030e"}
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.065625 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.065636 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b0d24523-ec54-4af5-90ce-b9c9188f92d9","Type":"ContainerDied","Data":"28f892ab57952c3fdc8c7789725e2f68fb97fd8a089822304b62069e3624d268"}
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.322510 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.358421 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0d24523-ec54-4af5-90ce-b9c9188f92d9-logs\") pod \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") "
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.358544 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjcrg\" (UniqueName: \"kubernetes.io/projected/b0d24523-ec54-4af5-90ce-b9c9188f92d9-kube-api-access-hjcrg\") pod \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") "
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.358748 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-combined-ca-bundle\") pod \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") "
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.358837 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-config-data\") pod \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\" (UID: \"b0d24523-ec54-4af5-90ce-b9c9188f92d9\") "
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.358917 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0d24523-ec54-4af5-90ce-b9c9188f92d9-logs" (OuterVolumeSpecName: "logs") pod "b0d24523-ec54-4af5-90ce-b9c9188f92d9" (UID: "b0d24523-ec54-4af5-90ce-b9c9188f92d9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.359349 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0d24523-ec54-4af5-90ce-b9c9188f92d9-logs\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.364476 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0d24523-ec54-4af5-90ce-b9c9188f92d9-kube-api-access-hjcrg" (OuterVolumeSpecName: "kube-api-access-hjcrg") pod "b0d24523-ec54-4af5-90ce-b9c9188f92d9" (UID: "b0d24523-ec54-4af5-90ce-b9c9188f92d9"). InnerVolumeSpecName "kube-api-access-hjcrg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.387077 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-config-data" (OuterVolumeSpecName: "config-data") pod "b0d24523-ec54-4af5-90ce-b9c9188f92d9" (UID: "b0d24523-ec54-4af5-90ce-b9c9188f92d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.394849 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0d24523-ec54-4af5-90ce-b9c9188f92d9" (UID: "b0d24523-ec54-4af5-90ce-b9c9188f92d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.461266 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.461307 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0d24523-ec54-4af5-90ce-b9c9188f92d9-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:32 crc kubenswrapper[4779]: I0929 09:49:32.461318 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjcrg\" (UniqueName: \"kubernetes.io/projected/b0d24523-ec54-4af5-90ce-b9c9188f92d9-kube-api-access-hjcrg\") on node \"crc\" DevicePath \"\""
Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.066481 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.196:3000/\": dial tcp 10.217.0.196:3000: connect: connection refused"
Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.075802 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b0d24523-ec54-4af5-90ce-b9c9188f92d9","Type":"ContainerDied","Data":"0cbceda1324728306f5da5c63b39fcdf25cfa633a67bc0e4f4a6456f452fed3b"}
Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.075831 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.076030 4779 scope.go:117] "RemoveContainer" containerID="cff9016bd7da203083c8e2815ea78c219fc3d726c9ea78a3acd4d696dd50030e" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.109751 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.114608 4779 scope.go:117] "RemoveContainer" containerID="28f892ab57952c3fdc8c7789725e2f68fb97fd8a089822304b62069e3624d268" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.127028 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.148256 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:33 crc kubenswrapper[4779]: E0929 09:49:33.148809 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-api" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.148834 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-api" Sep 29 09:49:33 crc kubenswrapper[4779]: E0929 09:49:33.148884 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-log" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.148893 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-log" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.149140 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-log" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.149175 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" containerName="nova-api-api" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.150446 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.152983 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.153313 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.163332 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.163462 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.173141 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.173228 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-public-tls-certs\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.173375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx2ck\" (UniqueName: \"kubernetes.io/projected/72361da2-53e5-428d-acb4-4c611f745f5d-kube-api-access-xx2ck\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.173444 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.173570 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-config-data\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.173652 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72361da2-53e5-428d-acb4-4c611f745f5d-logs\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.275398 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72361da2-53e5-428d-acb4-4c611f745f5d-logs\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.275500 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-internal-tls-certs\") pod \"nova-api-0\" (UID: 
\"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.275534 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-public-tls-certs\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.275614 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx2ck\" (UniqueName: \"kubernetes.io/projected/72361da2-53e5-428d-acb4-4c611f745f5d-kube-api-access-xx2ck\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.275652 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.275710 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-config-data\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.276333 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72361da2-53e5-428d-acb4-4c611f745f5d-logs\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.279741 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.279877 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-config-data\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.281185 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-public-tls-certs\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.285856 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.292695 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx2ck\" (UniqueName: \"kubernetes.io/projected/72361da2-53e5-428d-acb4-4c611f745f5d-kube-api-access-xx2ck\") pod \"nova-api-0\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " 
pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.479394 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:33 crc kubenswrapper[4779]: I0929 09:49:33.982959 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:33 crc kubenswrapper[4779]: W0929 09:49:33.985307 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72361da2_53e5_428d_acb4_4c611f745f5d.slice/crio-870743f8df281741d12bcc12b792a67c6963987b6a197233e5ab76fcc5b0ab17 WatchSource:0}: Error finding container 870743f8df281741d12bcc12b792a67c6963987b6a197233e5ab76fcc5b0ab17: Status 404 returned error can't find the container with id 870743f8df281741d12bcc12b792a67c6963987b6a197233e5ab76fcc5b0ab17 Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.086391 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72361da2-53e5-428d-acb4-4c611f745f5d","Type":"ContainerStarted","Data":"870743f8df281741d12bcc12b792a67c6963987b6a197233e5ab76fcc5b0ab17"} Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.089237 4779 generic.go:334] "Generic (PLEG): container finished" podID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerID="92818d526b2f683814fff80074ee525441a8689ba7c7b4544b40b9be675c1cb6" exitCode=0 Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.089279 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerDied","Data":"92818d526b2f683814fff80074ee525441a8689ba7c7b4544b40b9be675c1cb6"} Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.522270 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.602898 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-config-data\") pod \"3074cf44-792f-4339-86f3-cd9dcdcd534f\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.602977 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n5tm\" (UniqueName: \"kubernetes.io/projected/3074cf44-792f-4339-86f3-cd9dcdcd534f-kube-api-access-9n5tm\") pod \"3074cf44-792f-4339-86f3-cd9dcdcd534f\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.603004 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-combined-ca-bundle\") pod \"3074cf44-792f-4339-86f3-cd9dcdcd534f\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.603054 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-sg-core-conf-yaml\") pod \"3074cf44-792f-4339-86f3-cd9dcdcd534f\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.603079 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-run-httpd\") pod \"3074cf44-792f-4339-86f3-cd9dcdcd534f\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.603151 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-ceilometer-tls-certs\") pod \"3074cf44-792f-4339-86f3-cd9dcdcd534f\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.603281 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-scripts\") pod \"3074cf44-792f-4339-86f3-cd9dcdcd534f\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.603311 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-log-httpd\") pod \"3074cf44-792f-4339-86f3-cd9dcdcd534f\" (UID: \"3074cf44-792f-4339-86f3-cd9dcdcd534f\") " Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.604178 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3074cf44-792f-4339-86f3-cd9dcdcd534f" (UID: "3074cf44-792f-4339-86f3-cd9dcdcd534f"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.604376 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3074cf44-792f-4339-86f3-cd9dcdcd534f" (UID: "3074cf44-792f-4339-86f3-cd9dcdcd534f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.613082 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-scripts" (OuterVolumeSpecName: "scripts") pod "3074cf44-792f-4339-86f3-cd9dcdcd534f" (UID: "3074cf44-792f-4339-86f3-cd9dcdcd534f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.616216 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3074cf44-792f-4339-86f3-cd9dcdcd534f-kube-api-access-9n5tm" (OuterVolumeSpecName: "kube-api-access-9n5tm") pod "3074cf44-792f-4339-86f3-cd9dcdcd534f" (UID: "3074cf44-792f-4339-86f3-cd9dcdcd534f"). InnerVolumeSpecName "kube-api-access-9n5tm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.632143 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3074cf44-792f-4339-86f3-cd9dcdcd534f" (UID: "3074cf44-792f-4339-86f3-cd9dcdcd534f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.666948 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "3074cf44-792f-4339-86f3-cd9dcdcd534f" (UID: "3074cf44-792f-4339-86f3-cd9dcdcd534f"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.692841 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3074cf44-792f-4339-86f3-cd9dcdcd534f" (UID: "3074cf44-792f-4339-86f3-cd9dcdcd534f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.706092 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.706129 4779 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.706142 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n5tm\" (UniqueName: \"kubernetes.io/projected/3074cf44-792f-4339-86f3-cd9dcdcd534f-kube-api-access-9n5tm\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.706158 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.706170 4779 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.706179 4779 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3074cf44-792f-4339-86f3-cd9dcdcd534f-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.706190 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.724481 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-config-data" (OuterVolumeSpecName: "config-data") pod "3074cf44-792f-4339-86f3-cd9dcdcd534f" (UID: "3074cf44-792f-4339-86f3-cd9dcdcd534f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.726394 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0d24523-ec54-4af5-90ce-b9c9188f92d9" path="/var/lib/kubelet/pods/b0d24523-ec54-4af5-90ce-b9c9188f92d9/volumes" Sep 29 09:49:34 crc kubenswrapper[4779]: I0929 09:49:34.809941 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3074cf44-792f-4339-86f3-cd9dcdcd534f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.101382 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72361da2-53e5-428d-acb4-4c611f745f5d","Type":"ContainerStarted","Data":"ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de"} Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.101429 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72361da2-53e5-428d-acb4-4c611f745f5d","Type":"ContainerStarted","Data":"320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2"} Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.105324 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3074cf44-792f-4339-86f3-cd9dcdcd534f","Type":"ContainerDied","Data":"7211ff1809b6efabef80354668837640a4704a5c9b87e44480c9d1e6608d8a4e"} Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.105394 4779 scope.go:117] "RemoveContainer" containerID="71976e87f0d11a9c4a93ab2ecd5ebd26fb9b5efb0c7db1a184af6ef810ca4d34" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.105448 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.129024 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.129009238 podStartE2EDuration="2.129009238s" podCreationTimestamp="2025-09-29 09:49:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:35.125283941 +0000 UTC m=+1207.106607855" watchObservedRunningTime="2025-09-29 09:49:35.129009238 +0000 UTC m=+1207.110333142" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.140560 4779 scope.go:117] "RemoveContainer" containerID="96578404e3dcee2ccb503f6677aec18c57ae49cfcd981a91840ecbfef7fa8c86" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.153134 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.171983 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.173606 4779 scope.go:117] "RemoveContainer" containerID="92818d526b2f683814fff80074ee525441a8689ba7c7b4544b40b9be675c1cb6" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.191374 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:49:35 crc kubenswrapper[4779]: E0929 09:49:35.191809 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="ceilometer-central-agent" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.191834 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" 
containerName="ceilometer-central-agent" Sep 29 09:49:35 crc kubenswrapper[4779]: E0929 09:49:35.191845 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="ceilometer-notification-agent" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.191851 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="ceilometer-notification-agent" Sep 29 09:49:35 crc kubenswrapper[4779]: E0929 09:49:35.191866 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="sg-core" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.191872 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="sg-core" Sep 29 09:49:35 crc kubenswrapper[4779]: E0929 09:49:35.191892 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="proxy-httpd" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.191912 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="proxy-httpd" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.192090 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="sg-core" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.192112 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="ceilometer-central-agent" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.192122 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="proxy-httpd" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.192132 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" containerName="ceilometer-notification-agent" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.193742 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.193868 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.196755 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.196935 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.197886 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.225131 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.225194 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.225238 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.225290 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-scripts\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.225323 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dabe9a97-d078-4804-bbec-3942030e1914-run-httpd\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.225381 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dabe9a97-d078-4804-bbec-3942030e1914-log-httpd\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.225406 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxpqf\" (UniqueName: \"kubernetes.io/projected/dabe9a97-d078-4804-bbec-3942030e1914-kube-api-access-vxpqf\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.225428 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-config-data\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 
29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.243035 4779 scope.go:117] "RemoveContainer" containerID="6c85f90fc00feca86a2a212b751e91f2e1eae8f5977c2f66792caac9210521ea" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.327765 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.327868 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.327989 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.328053 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-scripts\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.328367 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dabe9a97-d078-4804-bbec-3942030e1914-run-httpd\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.328468 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dabe9a97-d078-4804-bbec-3942030e1914-log-httpd\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.328489 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxpqf\" (UniqueName: \"kubernetes.io/projected/dabe9a97-d078-4804-bbec-3942030e1914-kube-api-access-vxpqf\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.328509 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-config-data\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.329106 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dabe9a97-d078-4804-bbec-3942030e1914-log-httpd\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.329423 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/dabe9a97-d078-4804-bbec-3942030e1914-run-httpd\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.331843 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-scripts\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.331886 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.332340 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.333436 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-config-data\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.334205 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dabe9a97-d078-4804-bbec-3942030e1914-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.346996 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxpqf\" (UniqueName: \"kubernetes.io/projected/dabe9a97-d078-4804-bbec-3942030e1914-kube-api-access-vxpqf\") pod \"ceilometer-0\" (UID: \"dabe9a97-d078-4804-bbec-3942030e1914\") " pod="openstack/ceilometer-0" Sep 29 09:49:35 crc kubenswrapper[4779]: I0929 09:49:35.539291 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 09:49:36 crc kubenswrapper[4779]: I0929 09:49:36.027155 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 09:49:36 crc kubenswrapper[4779]: W0929 09:49:36.030482 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddabe9a97_d078_4804_bbec_3942030e1914.slice/crio-37cae9a0c2f85462af9dc49fd1b6d7ece7dab014939f1c29e1251687639c9d01 WatchSource:0}: Error finding container 37cae9a0c2f85462af9dc49fd1b6d7ece7dab014939f1c29e1251687639c9d01: Status 404 returned error can't find the container with id 37cae9a0c2f85462af9dc49fd1b6d7ece7dab014939f1c29e1251687639c9d01 Sep 29 09:49:36 crc kubenswrapper[4779]: I0929 09:49:36.033677 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 09:49:36 crc kubenswrapper[4779]: I0929 09:49:36.119133 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dabe9a97-d078-4804-bbec-3942030e1914","Type":"ContainerStarted","Data":"37cae9a0c2f85462af9dc49fd1b6d7ece7dab014939f1c29e1251687639c9d01"} Sep 29 09:49:36 crc kubenswrapper[4779]: I0929 09:49:36.672761 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:49:36 crc kubenswrapper[4779]: I0929 09:49:36.693219 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:49:36 crc kubenswrapper[4779]: I0929 09:49:36.729011 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3074cf44-792f-4339-86f3-cd9dcdcd534f" path="/var/lib/kubelet/pods/3074cf44-792f-4339-86f3-cd9dcdcd534f/volumes" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.136590 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dabe9a97-d078-4804-bbec-3942030e1914","Type":"ContainerStarted","Data":"60f949e7361d126047cae832755638c521d184f630c15707069949253c247b22"} Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.154444 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.347410 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-2hnns"] Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.349573 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.354100 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.355376 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.358984 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2hnns"] Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.481129 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.481248 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-config-data\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.481361 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs9zw\" (UniqueName: \"kubernetes.io/projected/152ec30c-7a74-4fcd-a062-52f60d17756e-kube-api-access-bs9zw\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.481556 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-scripts\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.583846 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-config-data\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.583962 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs9zw\" (UniqueName: \"kubernetes.io/projected/152ec30c-7a74-4fcd-a062-52f60d17756e-kube-api-access-bs9zw\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.584115 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-scripts\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.584275 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.591103 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-scripts\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.598664 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-config-data\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.609753 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs9zw\" (UniqueName: \"kubernetes.io/projected/152ec30c-7a74-4fcd-a062-52f60d17756e-kube-api-access-bs9zw\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.611673 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2hnns\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:37 crc kubenswrapper[4779]: I0929 09:49:37.682589 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:38 crc kubenswrapper[4779]: I0929 09:49:38.150213 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dabe9a97-d078-4804-bbec-3942030e1914","Type":"ContainerStarted","Data":"484b48f4452025edca27b36e8d2de5906b92d2b033b4d54e926a08817d7d6679"} Sep 29 09:49:38 crc kubenswrapper[4779]: I0929 09:49:38.311839 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2hnns"] Sep 29 09:49:38 crc kubenswrapper[4779]: I0929 09:49:38.514094 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54" Sep 29 09:49:38 crc kubenswrapper[4779]: I0929 09:49:38.597568 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c5b8df6f5-82kwj"] Sep 29 09:49:38 crc kubenswrapper[4779]: I0929 09:49:38.598676 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" podUID="2ad00777-208c-404d-bd36-c9b271156a3b" containerName="dnsmasq-dns" containerID="cri-o://fddeffb5978318ff4a7ea04690b0e1223843c5207c79250f72bf88f3f6593ef7" gracePeriod=10 Sep 29 09:49:38 crc kubenswrapper[4779]: E0929 09:49:38.822477 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ad00777_208c_404d_bd36_c9b271156a3b.slice/crio-fddeffb5978318ff4a7ea04690b0e1223843c5207c79250f72bf88f3f6593ef7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ad00777_208c_404d_bd36_c9b271156a3b.slice/crio-conmon-fddeffb5978318ff4a7ea04690b0e1223843c5207c79250f72bf88f3f6593ef7.scope\": RecentStats: unable to find data in memory cache]" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.162837 4779 generic.go:334] "Generic (PLEG): container finished" podID="2ad00777-208c-404d-bd36-c9b271156a3b" containerID="fddeffb5978318ff4a7ea04690b0e1223843c5207c79250f72bf88f3f6593ef7" exitCode=0 Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.162930 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" event={"ID":"2ad00777-208c-404d-bd36-c9b271156a3b","Type":"ContainerDied","Data":"fddeffb5978318ff4a7ea04690b0e1223843c5207c79250f72bf88f3f6593ef7"} Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.163218 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" event={"ID":"2ad00777-208c-404d-bd36-c9b271156a3b","Type":"ContainerDied","Data":"0a343c4dba34dda5eda393cafdad180ac60ed02c43d26ef4585710882db9928c"} Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.163240 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a343c4dba34dda5eda393cafdad180ac60ed02c43d26ef4585710882db9928c" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.166567 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2hnns" event={"ID":"152ec30c-7a74-4fcd-a062-52f60d17756e","Type":"ContainerStarted","Data":"58a0cc1a64a0c547a70655d8d911d26f7464b3add82dc9910d0a2a9de01e72d5"} Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.166605 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2hnns" 
event={"ID":"152ec30c-7a74-4fcd-a062-52f60d17756e","Type":"ContainerStarted","Data":"344869b4638a3b5701509b7221450b37478efa5fa77abd94f0228f4351f4eff7"} Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.170336 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dabe9a97-d078-4804-bbec-3942030e1914","Type":"ContainerStarted","Data":"7a9caa0b38c265d3378db0af12a5ae915320a3e21606d17f0d2f9e1715a92338"} Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.207025 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-2hnns" podStartSLOduration=2.207007334 podStartE2EDuration="2.207007334s" podCreationTimestamp="2025-09-29 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:39.20129856 +0000 UTC m=+1211.182622474" watchObservedRunningTime="2025-09-29 09:49:39.207007334 +0000 UTC m=+1211.188331238" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.223073 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.239173 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-775sg\" (UniqueName: \"kubernetes.io/projected/2ad00777-208c-404d-bd36-c9b271156a3b-kube-api-access-775sg\") pod \"2ad00777-208c-404d-bd36-c9b271156a3b\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.239298 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-nb\") pod \"2ad00777-208c-404d-bd36-c9b271156a3b\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.239358 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-sb\") pod \"2ad00777-208c-404d-bd36-c9b271156a3b\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.239387 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-dns-svc\") pod \"2ad00777-208c-404d-bd36-c9b271156a3b\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.239489 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-config\") pod \"2ad00777-208c-404d-bd36-c9b271156a3b\" (UID: \"2ad00777-208c-404d-bd36-c9b271156a3b\") " Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.245491 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad00777-208c-404d-bd36-c9b271156a3b-kube-api-access-775sg" (OuterVolumeSpecName: "kube-api-access-775sg") pod "2ad00777-208c-404d-bd36-c9b271156a3b" (UID: "2ad00777-208c-404d-bd36-c9b271156a3b"). InnerVolumeSpecName "kube-api-access-775sg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.339712 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2ad00777-208c-404d-bd36-c9b271156a3b" (UID: "2ad00777-208c-404d-bd36-c9b271156a3b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.343321 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-775sg\" (UniqueName: \"kubernetes.io/projected/2ad00777-208c-404d-bd36-c9b271156a3b-kube-api-access-775sg\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.343359 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.345439 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2ad00777-208c-404d-bd36-c9b271156a3b" (UID: "2ad00777-208c-404d-bd36-c9b271156a3b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.360630 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2ad00777-208c-404d-bd36-c9b271156a3b" (UID: "2ad00777-208c-404d-bd36-c9b271156a3b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.366854 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-config" (OuterVolumeSpecName: "config") pod "2ad00777-208c-404d-bd36-c9b271156a3b" (UID: "2ad00777-208c-404d-bd36-c9b271156a3b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.445544 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.445976 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:39 crc kubenswrapper[4779]: I0929 09:49:39.446002 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ad00777-208c-404d-bd36-c9b271156a3b-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:40 crc kubenswrapper[4779]: I0929 09:49:40.178137 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c5b8df6f5-82kwj" Sep 29 09:49:40 crc kubenswrapper[4779]: I0929 09:49:40.212399 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c5b8df6f5-82kwj"] Sep 29 09:49:40 crc kubenswrapper[4779]: I0929 09:49:40.220212 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c5b8df6f5-82kwj"] Sep 29 09:49:40 crc kubenswrapper[4779]: I0929 09:49:40.730317 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ad00777-208c-404d-bd36-c9b271156a3b" path="/var/lib/kubelet/pods/2ad00777-208c-404d-bd36-c9b271156a3b/volumes" Sep 29 09:49:41 crc kubenswrapper[4779]: I0929 09:49:41.192465 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dabe9a97-d078-4804-bbec-3942030e1914","Type":"ContainerStarted","Data":"25ca2cf159dc2d559ebb7faf6c4a7fa2634a1c089ddbd48fa95171e0d9a5881b"} Sep 29 09:49:41 crc kubenswrapper[4779]: I0929 09:49:41.193051 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 09:49:41 crc kubenswrapper[4779]: I0929 09:49:41.233472 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.879532282 podStartE2EDuration="6.233449683s" podCreationTimestamp="2025-09-29 09:49:35 +0000 UTC" firstStartedPulling="2025-09-29 09:49:36.033399943 +0000 UTC m=+1208.014723867" lastFinishedPulling="2025-09-29 09:49:40.387317364 +0000 UTC m=+1212.368641268" observedRunningTime="2025-09-29 09:49:41.220985036 +0000 UTC m=+1213.202308950" watchObservedRunningTime="2025-09-29 09:49:41.233449683 +0000 UTC m=+1213.214773587" Sep 29 09:49:43 crc kubenswrapper[4779]: I0929 09:49:43.479986 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 09:49:43 crc kubenswrapper[4779]: I0929 09:49:43.480312 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 09:49:44 crc kubenswrapper[4779]: I0929 09:49:44.222252 4779 generic.go:334] "Generic (PLEG): container finished" podID="152ec30c-7a74-4fcd-a062-52f60d17756e" containerID="58a0cc1a64a0c547a70655d8d911d26f7464b3add82dc9910d0a2a9de01e72d5" exitCode=0 Sep 29 09:49:44 crc kubenswrapper[4779]: I0929 09:49:44.222294 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2hnns" event={"ID":"152ec30c-7a74-4fcd-a062-52f60d17756e","Type":"ContainerDied","Data":"58a0cc1a64a0c547a70655d8d911d26f7464b3add82dc9910d0a2a9de01e72d5"} Sep 29 09:49:44 crc kubenswrapper[4779]: I0929 09:49:44.493071 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 09:49:44 crc kubenswrapper[4779]: I0929 09:49:44.493109 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.686287 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.774539 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-scripts\") pod \"152ec30c-7a74-4fcd-a062-52f60d17756e\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.774797 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-combined-ca-bundle\") pod \"152ec30c-7a74-4fcd-a062-52f60d17756e\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.774939 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-config-data\") pod \"152ec30c-7a74-4fcd-a062-52f60d17756e\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.775037 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs9zw\" (UniqueName: \"kubernetes.io/projected/152ec30c-7a74-4fcd-a062-52f60d17756e-kube-api-access-bs9zw\") pod \"152ec30c-7a74-4fcd-a062-52f60d17756e\" (UID: \"152ec30c-7a74-4fcd-a062-52f60d17756e\") " Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.780390 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-scripts" (OuterVolumeSpecName: "scripts") pod "152ec30c-7a74-4fcd-a062-52f60d17756e" (UID: "152ec30c-7a74-4fcd-a062-52f60d17756e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.780452 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/152ec30c-7a74-4fcd-a062-52f60d17756e-kube-api-access-bs9zw" (OuterVolumeSpecName: "kube-api-access-bs9zw") pod "152ec30c-7a74-4fcd-a062-52f60d17756e" (UID: "152ec30c-7a74-4fcd-a062-52f60d17756e"). InnerVolumeSpecName "kube-api-access-bs9zw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.811093 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-config-data" (OuterVolumeSpecName: "config-data") pod "152ec30c-7a74-4fcd-a062-52f60d17756e" (UID: "152ec30c-7a74-4fcd-a062-52f60d17756e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.814308 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "152ec30c-7a74-4fcd-a062-52f60d17756e" (UID: "152ec30c-7a74-4fcd-a062-52f60d17756e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.877002 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.877051 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.877066 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs9zw\" (UniqueName: \"kubernetes.io/projected/152ec30c-7a74-4fcd-a062-52f60d17756e-kube-api-access-bs9zw\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:45 crc kubenswrapper[4779]: I0929 09:49:45.877083 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/152ec30c-7a74-4fcd-a062-52f60d17756e-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.268189 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2hnns" event={"ID":"152ec30c-7a74-4fcd-a062-52f60d17756e","Type":"ContainerDied","Data":"344869b4638a3b5701509b7221450b37478efa5fa77abd94f0228f4351f4eff7"} Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.268471 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="344869b4638a3b5701509b7221450b37478efa5fa77abd94f0228f4351f4eff7" Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.268265 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2hnns" Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.433208 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.433480 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-log" containerID="cri-o://320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2" gracePeriod=30 Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.433542 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-api" containerID="cri-o://ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de" gracePeriod=30 Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.447181 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.447419 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d517750c-87ac-4c0a-b80e-705b03317ba0" containerName="nova-scheduler-scheduler" containerID="cri-o://45590e4cbae05a9131d9d7c26aece6f8431c36aa4e7b7accd758ad519c06cd6b" gracePeriod=30 Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.491977 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.492813 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" 
containerName="nova-metadata-metadata" containerID="cri-o://edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82" gracePeriod=30 Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.493002 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-log" containerID="cri-o://b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9" gracePeriod=30 Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.966578 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:49:46 crc kubenswrapper[4779]: I0929 09:49:46.966643 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.280367 4779 generic.go:334] "Generic (PLEG): container finished" podID="72361da2-53e5-428d-acb4-4c611f745f5d" containerID="320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2" exitCode=143 Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.280561 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72361da2-53e5-428d-acb4-4c611f745f5d","Type":"ContainerDied","Data":"320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2"} Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.283331 4779 generic.go:334] "Generic (PLEG): container finished" podID="d517750c-87ac-4c0a-b80e-705b03317ba0" containerID="45590e4cbae05a9131d9d7c26aece6f8431c36aa4e7b7accd758ad519c06cd6b" exitCode=0 Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.283377 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d517750c-87ac-4c0a-b80e-705b03317ba0","Type":"ContainerDied","Data":"45590e4cbae05a9131d9d7c26aece6f8431c36aa4e7b7accd758ad519c06cd6b"} Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.285452 4779 generic.go:334] "Generic (PLEG): container finished" podID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerID="b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9" exitCode=143 Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.285525 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41eb7f46-6e65-479d-bca0-29471323c4d7","Type":"ContainerDied","Data":"b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9"} Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.687811 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.711469 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwdnd\" (UniqueName: \"kubernetes.io/projected/d517750c-87ac-4c0a-b80e-705b03317ba0-kube-api-access-nwdnd\") pod \"d517750c-87ac-4c0a-b80e-705b03317ba0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.711582 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-config-data\") pod \"d517750c-87ac-4c0a-b80e-705b03317ba0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.711696 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-combined-ca-bundle\") pod \"d517750c-87ac-4c0a-b80e-705b03317ba0\" (UID: \"d517750c-87ac-4c0a-b80e-705b03317ba0\") " Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.732994 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d517750c-87ac-4c0a-b80e-705b03317ba0-kube-api-access-nwdnd" (OuterVolumeSpecName: "kube-api-access-nwdnd") pod "d517750c-87ac-4c0a-b80e-705b03317ba0" (UID: "d517750c-87ac-4c0a-b80e-705b03317ba0"). InnerVolumeSpecName "kube-api-access-nwdnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.755882 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d517750c-87ac-4c0a-b80e-705b03317ba0" (UID: "d517750c-87ac-4c0a-b80e-705b03317ba0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.791040 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-config-data" (OuterVolumeSpecName: "config-data") pod "d517750c-87ac-4c0a-b80e-705b03317ba0" (UID: "d517750c-87ac-4c0a-b80e-705b03317ba0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.813529 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.813564 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d517750c-87ac-4c0a-b80e-705b03317ba0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.813575 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwdnd\" (UniqueName: \"kubernetes.io/projected/d517750c-87ac-4c0a-b80e-705b03317ba0-kube-api-access-nwdnd\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.965591 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:47 crc kubenswrapper[4779]: I0929 09:49:47.980389 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.020892 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5xft\" (UniqueName: \"kubernetes.io/projected/41eb7f46-6e65-479d-bca0-29471323c4d7-kube-api-access-f5xft\") pod \"41eb7f46-6e65-479d-bca0-29471323c4d7\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.020944 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-public-tls-certs\") pod \"72361da2-53e5-428d-acb4-4c611f745f5d\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.020974 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72361da2-53e5-428d-acb4-4c611f745f5d-logs\") pod \"72361da2-53e5-428d-acb4-4c611f745f5d\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.021042 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-internal-tls-certs\") pod \"72361da2-53e5-428d-acb4-4c611f745f5d\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.021092 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-nova-metadata-tls-certs\") pod \"41eb7f46-6e65-479d-bca0-29471323c4d7\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.021140 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx2ck\" (UniqueName: \"kubernetes.io/projected/72361da2-53e5-428d-acb4-4c611f745f5d-kube-api-access-xx2ck\") pod \"72361da2-53e5-428d-acb4-4c611f745f5d\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.021209 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-config-data\") pod \"72361da2-53e5-428d-acb4-4c611f745f5d\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.021243 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41eb7f46-6e65-479d-bca0-29471323c4d7-logs\") pod \"41eb7f46-6e65-479d-bca0-29471323c4d7\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.021259 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-combined-ca-bundle\") pod \"41eb7f46-6e65-479d-bca0-29471323c4d7\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.021287 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-combined-ca-bundle\") pod \"72361da2-53e5-428d-acb4-4c611f745f5d\" (UID: \"72361da2-53e5-428d-acb4-4c611f745f5d\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.021317 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-config-data\") pod \"41eb7f46-6e65-479d-bca0-29471323c4d7\" (UID: \"41eb7f46-6e65-479d-bca0-29471323c4d7\") " Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.022456 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72361da2-53e5-428d-acb4-4c611f745f5d-logs" (OuterVolumeSpecName: "logs") pod "72361da2-53e5-428d-acb4-4c611f745f5d" (UID: "72361da2-53e5-428d-acb4-4c611f745f5d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.026734 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41eb7f46-6e65-479d-bca0-29471323c4d7-logs" (OuterVolumeSpecName: "logs") pod "41eb7f46-6e65-479d-bca0-29471323c4d7" (UID: "41eb7f46-6e65-479d-bca0-29471323c4d7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.047102 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41eb7f46-6e65-479d-bca0-29471323c4d7-kube-api-access-f5xft" (OuterVolumeSpecName: "kube-api-access-f5xft") pod "41eb7f46-6e65-479d-bca0-29471323c4d7" (UID: "41eb7f46-6e65-479d-bca0-29471323c4d7"). InnerVolumeSpecName "kube-api-access-f5xft". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.047295 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72361da2-53e5-428d-acb4-4c611f745f5d-kube-api-access-xx2ck" (OuterVolumeSpecName: "kube-api-access-xx2ck") pod "72361da2-53e5-428d-acb4-4c611f745f5d" (UID: "72361da2-53e5-428d-acb4-4c611f745f5d"). InnerVolumeSpecName "kube-api-access-xx2ck". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.050629 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-config-data" (OuterVolumeSpecName: "config-data") pod "72361da2-53e5-428d-acb4-4c611f745f5d" (UID: "72361da2-53e5-428d-acb4-4c611f745f5d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.055020 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41eb7f46-6e65-479d-bca0-29471323c4d7" (UID: "41eb7f46-6e65-479d-bca0-29471323c4d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.079724 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-config-data" (OuterVolumeSpecName: "config-data") pod "41eb7f46-6e65-479d-bca0-29471323c4d7" (UID: "41eb7f46-6e65-479d-bca0-29471323c4d7"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.082430 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "72361da2-53e5-428d-acb4-4c611f745f5d" (UID: "72361da2-53e5-428d-acb4-4c611f745f5d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.087714 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72361da2-53e5-428d-acb4-4c611f745f5d" (UID: "72361da2-53e5-428d-acb4-4c611f745f5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.092729 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "72361da2-53e5-428d-acb4-4c611f745f5d" (UID: "72361da2-53e5-428d-acb4-4c611f745f5d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.094106 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "41eb7f46-6e65-479d-bca0-29471323c4d7" (UID: "41eb7f46-6e65-479d-bca0-29471323c4d7"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123542 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72361da2-53e5-428d-acb4-4c611f745f5d-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123580 4779 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123594 4779 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123609 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx2ck\" (UniqueName: \"kubernetes.io/projected/72361da2-53e5-428d-acb4-4c611f745f5d-kube-api-access-xx2ck\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123620 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123633 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41eb7f46-6e65-479d-bca0-29471323c4d7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123643 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123653 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123663 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41eb7f46-6e65-479d-bca0-29471323c4d7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123674 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5xft\" (UniqueName: \"kubernetes.io/projected/41eb7f46-6e65-479d-bca0-29471323c4d7-kube-api-access-f5xft\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.123684 4779 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72361da2-53e5-428d-acb4-4c611f745f5d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.301744 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d517750c-87ac-4c0a-b80e-705b03317ba0","Type":"ContainerDied","Data":"e0e9cc5cd7c3ad38ea8821f34fa2e2ead7bf985ea42b35df3bdfa09340aaf0f3"} Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.302070 4779 scope.go:117] "RemoveContainer" containerID="45590e4cbae05a9131d9d7c26aece6f8431c36aa4e7b7accd758ad519c06cd6b" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.301952 4779 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.313024 4779 generic.go:334] "Generic (PLEG): container finished" podID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerID="edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82" exitCode=0 Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.313116 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.313116 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41eb7f46-6e65-479d-bca0-29471323c4d7","Type":"ContainerDied","Data":"edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82"} Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.313422 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"41eb7f46-6e65-479d-bca0-29471323c4d7","Type":"ContainerDied","Data":"da2f1011f73a159b4323d5c2b87c749a642d6a13c1c8744890b61d856b33c307"} Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.333525 4779 generic.go:334] "Generic (PLEG): container finished" podID="72361da2-53e5-428d-acb4-4c611f745f5d" containerID="ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de" exitCode=0 Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.333592 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72361da2-53e5-428d-acb4-4c611f745f5d","Type":"ContainerDied","Data":"ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de"} Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.333621 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72361da2-53e5-428d-acb4-4c611f745f5d","Type":"ContainerDied","Data":"870743f8df281741d12bcc12b792a67c6963987b6a197233e5ab76fcc5b0ab17"} Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.333712 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.335804 4779 scope.go:117] "RemoveContainer" containerID="edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.350229 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.369482 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.378835 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.410283 4779 scope.go:117] "RemoveContainer" containerID="b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.410414 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.416848 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.418792 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-metadata" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.418807 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-metadata" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.418823 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d517750c-87ac-4c0a-b80e-705b03317ba0" containerName="nova-scheduler-scheduler" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.418831 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d517750c-87ac-4c0a-b80e-705b03317ba0" containerName="nova-scheduler-scheduler" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.418850 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-log" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.418868 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-log" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.418887 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad00777-208c-404d-bd36-c9b271156a3b" containerName="dnsmasq-dns" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.418892 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad00777-208c-404d-bd36-c9b271156a3b" containerName="dnsmasq-dns" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.418917 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152ec30c-7a74-4fcd-a062-52f60d17756e" containerName="nova-manage" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.418923 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="152ec30c-7a74-4fcd-a062-52f60d17756e" containerName="nova-manage" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.418936 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad00777-208c-404d-bd36-c9b271156a3b" containerName="init" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.418943 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad00777-208c-404d-bd36-c9b271156a3b" containerName="init" Sep 29 
09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.418952 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-log" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.418958 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-log" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.418977 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-api" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.418997 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-api" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.419194 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d517750c-87ac-4c0a-b80e-705b03317ba0" containerName="nova-scheduler-scheduler" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.419204 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-log" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.419233 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-api" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.419241 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ad00777-208c-404d-bd36-c9b271156a3b" containerName="dnsmasq-dns" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.419254 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" containerName="nova-metadata-metadata" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.419264 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" containerName="nova-api-log" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.419274 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="152ec30c-7a74-4fcd-a062-52f60d17756e" containerName="nova-manage" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.420046 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.428351 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.434229 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.463712 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.468488 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.474942 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.475437 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.497116 4779 scope.go:117] "RemoveContainer" containerID="edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.497535 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.499644 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82\": container with ID starting with edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82 not found: ID does not exist" containerID="edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.499972 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82"} err="failed to get container status \"edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82\": rpc error: code = NotFound desc = could not find container \"edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82\": container with ID starting with edab7e3eabec018a4960fe536ec9cb683f6a0099b8f56f4a012fee82226ced82 not found: ID does not exist" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.502200 4779 scope.go:117] "RemoveContainer" containerID="b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.507113 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9\": container with ID starting with b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9 not found: ID does not exist" containerID="b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.507156 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9"} err="failed to get container status \"b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9\": rpc error: code = NotFound desc = could not find container \"b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9\": container with ID starting with b88bdedb6344915e35f7a221321cd69556c3178625c268d548d518e542f07dc9 not found: ID does not exist" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.507201 4779 scope.go:117] "RemoveContainer" containerID="ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.515443 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 
09:49:48.525930 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.535608 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-config-data\") pod \"nova-scheduler-0\" (UID: \"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.535660 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.535679 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-config-data\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.535891 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djd2j\" (UniqueName: \"kubernetes.io/projected/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-kube-api-access-djd2j\") pod \"nova-scheduler-0\" (UID: \"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.535958 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.536039 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.536129 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/087223b2-e95e-4876-8bb1-d9fa4cab5575-logs\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.536239 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck5bp\" (UniqueName: \"kubernetes.io/projected/087223b2-e95e-4876-8bb1-d9fa4cab5575-kube-api-access-ck5bp\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.538199 4779 scope.go:117] "RemoveContainer" containerID="320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.541752 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 
09:49:48.543529 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.547285 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.547494 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.547614 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.554468 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.572044 4779 scope.go:117] "RemoveContainer" containerID="ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.572557 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de\": container with ID starting with ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de not found: ID does not exist" containerID="ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.572638 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de"} err="failed to get container status \"ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de\": rpc error: code = NotFound desc = could not find container \"ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de\": container with ID starting with ce5308a7b91d9426121f3efc328e2d75be4e7c7ec8c960a612bc5ef9d20712de not found: ID does not exist" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.572667 4779 scope.go:117] "RemoveContainer" containerID="320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2" Sep 29 09:49:48 crc kubenswrapper[4779]: E0929 09:49:48.573178 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2\": container with ID starting with 320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2 not found: ID does not exist" containerID="320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.573274 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2"} err="failed to get container status \"320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2\": rpc error: code = NotFound desc = could not find container \"320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2\": container with ID starting with 320c392276452770ec0d97fb0c15d202fa3b321f41d1166b38eeebaf53f740e2 not found: ID does not exist" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.638262 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djd2j\" (UniqueName: \"kubernetes.io/projected/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-kube-api-access-djd2j\") pod \"nova-scheduler-0\" (UID: 
\"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.638542 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.638668 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.638753 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmlhp\" (UniqueName: \"kubernetes.io/projected/01d9cfc0-d700-4859-84e9-66ebd1047e97-kube-api-access-mmlhp\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.638858 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/087223b2-e95e-4876-8bb1-d9fa4cab5575-logs\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639011 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-internal-tls-certs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639129 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01d9cfc0-d700-4859-84e9-66ebd1047e97-logs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639240 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck5bp\" (UniqueName: \"kubernetes.io/projected/087223b2-e95e-4876-8bb1-d9fa4cab5575-kube-api-access-ck5bp\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639342 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-config-data\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639394 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/087223b2-e95e-4876-8bb1-d9fa4cab5575-logs\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639513 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-config-data\") pod \"nova-scheduler-0\" (UID: \"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639588 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639669 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-config-data\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639738 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.639829 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-public-tls-certs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.642838 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.642994 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-config-data\") pod \"nova-scheduler-0\" (UID: \"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.643056 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.643545 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.643688 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/087223b2-e95e-4876-8bb1-d9fa4cab5575-config-data\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.653860 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ck5bp\" (UniqueName: \"kubernetes.io/projected/087223b2-e95e-4876-8bb1-d9fa4cab5575-kube-api-access-ck5bp\") pod \"nova-metadata-0\" (UID: \"087223b2-e95e-4876-8bb1-d9fa4cab5575\") " pod="openstack/nova-metadata-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.659320 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djd2j\" (UniqueName: \"kubernetes.io/projected/b34ff414-f728-4ce2-98a5-cc4e005a0a3c-kube-api-access-djd2j\") pod \"nova-scheduler-0\" (UID: \"b34ff414-f728-4ce2-98a5-cc4e005a0a3c\") " pod="openstack/nova-scheduler-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.726192 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41eb7f46-6e65-479d-bca0-29471323c4d7" path="/var/lib/kubelet/pods/41eb7f46-6e65-479d-bca0-29471323c4d7/volumes" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.727256 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72361da2-53e5-428d-acb4-4c611f745f5d" path="/var/lib/kubelet/pods/72361da2-53e5-428d-acb4-4c611f745f5d/volumes" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.728184 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d517750c-87ac-4c0a-b80e-705b03317ba0" path="/var/lib/kubelet/pods/d517750c-87ac-4c0a-b80e-705b03317ba0/volumes" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.741930 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01d9cfc0-d700-4859-84e9-66ebd1047e97-logs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.742037 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-config-data\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.742100 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.742136 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-public-tls-certs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.742195 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmlhp\" (UniqueName: \"kubernetes.io/projected/01d9cfc0-d700-4859-84e9-66ebd1047e97-kube-api-access-mmlhp\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.742231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-internal-tls-certs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0" Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 
09:49:48.742453 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01d9cfc0-d700-4859-84e9-66ebd1047e97-logs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0"
Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.745782 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-public-tls-certs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0"
Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.745940 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-internal-tls-certs\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0"
Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.746251 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0"
Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.747058 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01d9cfc0-d700-4859-84e9-66ebd1047e97-config-data\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0"
Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.759866 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmlhp\" (UniqueName: \"kubernetes.io/projected/01d9cfc0-d700-4859-84e9-66ebd1047e97-kube-api-access-mmlhp\") pod \"nova-api-0\" (UID: \"01d9cfc0-d700-4859-84e9-66ebd1047e97\") " pod="openstack/nova-api-0"
Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.777466 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.812724 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 09:49:48 crc kubenswrapper[4779]: I0929 09:49:48.863468 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
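
The run of MountVolume.SetUp records above is the kubelet's volume manager staging every volume the pod declares (empty-dir scratch space, Secret-backed config and TLS material, and the auto-injected projected service-account token, the kube-api-access-* volume) before any sandbox exists; only once the set is complete does the "No sandbox for pod can be found" path start one. Below is a minimal sketch of the same volume shapes using the upstream Go API types; the Secret name and the 3607-second token lifetime are assumptions for illustration, not values taken from this cluster.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Assumed default lifetime for a projected service-account token.
	expiry := int64(3607)
	volumes := []corev1.Volume{
		// "logs": pod-scoped scratch space (kubernetes.io/empty-dir above).
		{Name: "logs", VolumeSource: corev1.VolumeSource{
			EmptyDir: &corev1.EmptyDirVolumeSource{},
		}},
		// A Secret-backed volume (kubernetes.io/secret above); the
		// SecretName here is a hypothetical stand-in.
		{Name: "config-data", VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{SecretName: "nova-api-config-data"},
		}},
		// "kube-api-access-mmlhp": the projected service-account token
		// volume (kubernetes.io/projected above).
		{Name: "kube-api-access-mmlhp", VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{{
					ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						Path:              "token",
						ExpirationSeconds: &expiry,
					},
				}},
			},
		}},
	}
	for _, v := range volumes {
		fmt.Println("would mount:", v.Name)
	}
}
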
Sep 29 09:49:49 crc kubenswrapper[4779]: I0929 09:49:49.249667 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 09:49:49 crc kubenswrapper[4779]: W0929 09:49:49.253305 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb34ff414_f728_4ce2_98a5_cc4e005a0a3c.slice/crio-d2a46b5ddc7eb16c37ac102f5f00fc0fc38a3ab785f9762806506c0a61999d88 WatchSource:0}: Error finding container d2a46b5ddc7eb16c37ac102f5f00fc0fc38a3ab785f9762806506c0a61999d88: Status 404 returned error can't find the container with id d2a46b5ddc7eb16c37ac102f5f00fc0fc38a3ab785f9762806506c0a61999d88
Sep 29 09:49:49 crc kubenswrapper[4779]: I0929 09:49:49.345338 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 09:49:49 crc kubenswrapper[4779]: W0929 09:49:49.347890 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod087223b2_e95e_4876_8bb1_d9fa4cab5575.slice/crio-c27e91bc805706cc58c166e765702b3f861438d60510ae3ab202766a99c84246 WatchSource:0}: Error finding container c27e91bc805706cc58c166e765702b3f861438d60510ae3ab202766a99c84246: Status 404 returned error can't find the container with id c27e91bc805706cc58c166e765702b3f861438d60510ae3ab202766a99c84246
Sep 29 09:49:49 crc kubenswrapper[4779]: I0929 09:49:49.347934 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b34ff414-f728-4ce2-98a5-cc4e005a0a3c","Type":"ContainerStarted","Data":"d2a46b5ddc7eb16c37ac102f5f00fc0fc38a3ab785f9762806506c0a61999d88"}
Sep 29 09:49:49 crc kubenswrapper[4779]: I0929 09:49:49.419007 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.363679 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"087223b2-e95e-4876-8bb1-d9fa4cab5575","Type":"ContainerStarted","Data":"306be55670c21becb2cbf90dc2643b73a93fe23095a57125032935833abdec06"}
Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.364229 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"087223b2-e95e-4876-8bb1-d9fa4cab5575","Type":"ContainerStarted","Data":"4dcb5595d943f33d21ce5ad00a213a4ee85be0d9737cc1e8ff7bc97d4bd6a089"}
Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.364246 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"087223b2-e95e-4876-8bb1-d9fa4cab5575","Type":"ContainerStarted","Data":"c27e91bc805706cc58c166e765702b3f861438d60510ae3ab202766a99c84246"}
Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.365226 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01d9cfc0-d700-4859-84e9-66ebd1047e97","Type":"ContainerStarted","Data":"18afdc8ae0735de0ea80a51f19297867fd798b97d05159b59b0478ab591e2667"}
Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.365257 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01d9cfc0-d700-4859-84e9-66ebd1047e97","Type":"ContainerStarted","Data":"658a12e16ae72ab63367709869b342843213b6519dbe520ab06496a2d1ccca89"}
Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.365266 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0"
event={"ID":"01d9cfc0-d700-4859-84e9-66ebd1047e97","Type":"ContainerStarted","Data":"84f80e4f1af1a934f7f075953c3d610b296d4ed94739a57300a4bcdb5d064e10"} Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.366315 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b34ff414-f728-4ce2-98a5-cc4e005a0a3c","Type":"ContainerStarted","Data":"5af2bf61609c288298a54e179e0669f5e240ec761ce597b5158b9b41680625ee"} Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.380798 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.380782488 podStartE2EDuration="2.380782488s" podCreationTimestamp="2025-09-29 09:49:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:50.378202964 +0000 UTC m=+1222.359526878" watchObservedRunningTime="2025-09-29 09:49:50.380782488 +0000 UTC m=+1222.362106392" Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.412903 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.412878776 podStartE2EDuration="2.412878776s" podCreationTimestamp="2025-09-29 09:49:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:50.398810034 +0000 UTC m=+1222.380133948" watchObservedRunningTime="2025-09-29 09:49:50.412878776 +0000 UTC m=+1222.394202680" Sep 29 09:49:50 crc kubenswrapper[4779]: I0929 09:49:50.427401 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.427383301 podStartE2EDuration="2.427383301s" podCreationTimestamp="2025-09-29 09:49:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:49:50.41931378 +0000 UTC m=+1222.400637684" watchObservedRunningTime="2025-09-29 09:49:50.427383301 +0000 UTC m=+1222.408707205" Sep 29 09:49:53 crc kubenswrapper[4779]: I0929 09:49:53.778138 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 09:49:53 crc kubenswrapper[4779]: I0929 09:49:53.813158 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 09:49:53 crc kubenswrapper[4779]: I0929 09:49:53.813277 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 09:49:58 crc kubenswrapper[4779]: I0929 09:49:58.778449 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 09:49:58 crc kubenswrapper[4779]: I0929 09:49:58.802244 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 09:49:58 crc kubenswrapper[4779]: I0929 09:49:58.812920 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 09:49:58 crc kubenswrapper[4779]: I0929 09:49:58.812975 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 09:49:58 crc kubenswrapper[4779]: I0929 09:49:58.864806 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 09:49:58 crc kubenswrapper[4779]: I0929 09:49:58.864848 4779 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 09:49:59 crc kubenswrapper[4779]: I0929 09:49:59.483262 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 09:49:59 crc kubenswrapper[4779]: I0929 09:49:59.834298 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="087223b2-e95e-4876-8bb1-d9fa4cab5575" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.208:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 09:49:59 crc kubenswrapper[4779]: I0929 09:49:59.834719 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="087223b2-e95e-4876-8bb1-d9fa4cab5575" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.208:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 09:49:59 crc kubenswrapper[4779]: I0929 09:49:59.877116 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="01d9cfc0-d700-4859-84e9-66ebd1047e97" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 09:49:59 crc kubenswrapper[4779]: I0929 09:49:59.877116 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="01d9cfc0-d700-4859-84e9-66ebd1047e97" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 09:50:05 crc kubenswrapper[4779]: I0929 09:50:05.549643 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 09:50:08 crc kubenswrapper[4779]: I0929 09:50:08.820544 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 09:50:08 crc kubenswrapper[4779]: I0929 09:50:08.830187 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 09:50:08 crc kubenswrapper[4779]: I0929 09:50:08.834414 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 09:50:08 crc kubenswrapper[4779]: I0929 09:50:08.885815 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 09:50:08 crc kubenswrapper[4779]: I0929 09:50:08.888001 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 09:50:08 crc kubenswrapper[4779]: I0929 09:50:08.888593 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 09:50:08 crc kubenswrapper[4779]: I0929 09:50:08.895959 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 09:50:09 crc kubenswrapper[4779]: I0929 09:50:09.567252 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 09:50:09 crc kubenswrapper[4779]: I0929 09:50:09.572606 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 09:50:09 crc kubenswrapper[4779]: I0929 09:50:09.581024 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 
Sep 29 09:50:16 crc kubenswrapper[4779]: I0929 09:50:16.966130 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:50:16 crc kubenswrapper[4779]: I0929 09:50:16.966723 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:50:18 crc kubenswrapper[4779]: I0929 09:50:18.124383 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 09:50:18 crc kubenswrapper[4779]: I0929 09:50:18.954199 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 09:50:21 crc kubenswrapper[4779]: I0929 09:50:21.458878 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerName="rabbitmq" containerID="cri-o://9b36ddd7a7d61bb24461a2467c281c67c711b0a2f731a9d0c87617f0e20b252b" gracePeriod=604797
Sep 29 09:50:21 crc kubenswrapper[4779]: I0929 09:50:21.989941 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="779f829e-6240-47a5-8d8d-9e279d316df7" containerName="rabbitmq" containerID="cri-o://08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2" gracePeriod=604797
Sep 29 09:50:22 crc kubenswrapper[4779]: I0929 09:50:22.700861 4779 generic.go:334] "Generic (PLEG): container finished" podID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerID="9b36ddd7a7d61bb24461a2467c281c67c711b0a2f731a9d0c87617f0e20b252b" exitCode=0
Sep 29 09:50:22 crc kubenswrapper[4779]: I0929 09:50:22.700909 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8faade2a-9a07-45b9-99e4-b448b64afaaa","Type":"ContainerDied","Data":"9b36ddd7a7d61bb24461a2467c281c67c711b0a2f731a9d0c87617f0e20b252b"}
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.034454 4779 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.106402 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8faade2a-9a07-45b9-99e4-b448b64afaaa-pod-info\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.106455 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrmmh\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-kube-api-access-nrmmh\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.106483 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-plugins\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.106511 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.106533 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-server-conf\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.106579 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-tls\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.106646 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-plugins-conf\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.106670 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8faade2a-9a07-45b9-99e4-b448b64afaaa-erlang-cookie-secret\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.107441 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-confd\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.107470 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-config-data\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: 
\"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.107491 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-erlang-cookie\") pod \"8faade2a-9a07-45b9-99e4-b448b64afaaa\" (UID: \"8faade2a-9a07-45b9-99e4-b448b64afaaa\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.108197 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.111430 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.111641 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.113595 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.116360 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.117252 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8faade2a-9a07-45b9-99e4-b448b64afaaa-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.118010 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-kube-api-access-nrmmh" (OuterVolumeSpecName: "kube-api-access-nrmmh") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "kube-api-access-nrmmh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.126787 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8faade2a-9a07-45b9-99e4-b448b64afaaa-pod-info" (OuterVolumeSpecName: "pod-info") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.166372 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-config-data" (OuterVolumeSpecName: "config-data") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.193582 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-server-conf" (OuterVolumeSpecName: "server-conf") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208673 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208705 4779 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208714 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208723 4779 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208734 4779 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8faade2a-9a07-45b9-99e4-b448b64afaaa-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208743 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8faade2a-9a07-45b9-99e4-b448b64afaaa-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208752 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208760 4779 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8faade2a-9a07-45b9-99e4-b448b64afaaa-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208768 4779 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrmmh\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-kube-api-access-nrmmh\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.208777 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.226713 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.258116 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8faade2a-9a07-45b9-99e4-b448b64afaaa" (UID: "8faade2a-9a07-45b9-99e4-b448b64afaaa"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.310161 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.310482 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8faade2a-9a07-45b9-99e4-b448b64afaaa-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.457522 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
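
The teardown above follows the volume manager's two-phase order: UnmountVolume.TearDown removes each pod-scoped mount, and only the device-backed volume (the local PV local-storage01-crc behind the "persistence" claim) additionally goes through UnmountDevice before being reported detached. The earlier gracePeriod=604797 is also consistent with a seven-day terminationGracePeriodSeconds of 604800 minus the seconds already elapsed when the kill was issued. Below is a toy sketch of that unmount ordering; the function names are hypothetical stand-ins, not kubelet APIs.

package main

import "fmt"

type mountedVolume struct {
	name          string
	deviceMounted bool // local/block-backed volumes also need a device-level unmount
}

// tearDown stands in for the per-pod UnmountVolume.TearDown step.
func tearDown(v mountedVolume) error { fmt.Println("TearDown:", v.name); return nil }

// unmountDevice stands in for the device-level UnmountDevice step.
func unmountDevice(v mountedVolume) error { fmt.Println("UnmountDevice:", v.name); return nil }

func reconcileUnmounts(vols []mountedVolume) {
	for _, v := range vols {
		if err := tearDown(v); err != nil {
			continue // a failed step is retried on the next reconciler pass
		}
		if v.deviceMounted {
			if err := unmountDevice(v); err != nil {
				continue
			}
		}
		fmt.Println("Volume detached:", v.name)
	}
}

func main() {
	reconcileUnmounts([]mountedVolume{
		{name: "server-conf"},
		{name: "rabbitmq-tls"},
		{name: "local-storage01-crc", deviceMounted: true},
	})
}
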
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615492 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-plugins-conf\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615583 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-erlang-cookie\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615652 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz796\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-kube-api-access-vz796\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615695 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-config-data\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615756 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/779f829e-6240-47a5-8d8d-9e279d316df7-erlang-cookie-secret\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615803 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-confd\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615832 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/779f829e-6240-47a5-8d8d-9e279d316df7-pod-info\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615878 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615901 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-plugins\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") "
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615969 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-tls\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID:
\"779f829e-6240-47a5-8d8d-9e279d316df7\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.615999 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-server-conf\") pod \"779f829e-6240-47a5-8d8d-9e279d316df7\" (UID: \"779f829e-6240-47a5-8d8d-9e279d316df7\") " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.618645 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.620071 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.620262 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.620395 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.621415 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-kube-api-access-vz796" (OuterVolumeSpecName: "kube-api-access-vz796") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "kube-api-access-vz796". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.623063 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/779f829e-6240-47a5-8d8d-9e279d316df7-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.623099 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.626991 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/779f829e-6240-47a5-8d8d-9e279d316df7-pod-info" (OuterVolumeSpecName: "pod-info") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.649666 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-config-data" (OuterVolumeSpecName: "config-data") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.685824 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-server-conf" (OuterVolumeSpecName: "server-conf") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.716049 4779 generic.go:334] "Generic (PLEG): container finished" podID="779f829e-6240-47a5-8d8d-9e279d316df7" containerID="08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2" exitCode=0 Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.716121 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"779f829e-6240-47a5-8d8d-9e279d316df7","Type":"ContainerDied","Data":"08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2"} Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.716141 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"779f829e-6240-47a5-8d8d-9e279d316df7","Type":"ContainerDied","Data":"10c37261f0ce9eebaf30a61f3b0ec30a08c394a5065f89fbb65c2fa6f3efadcd"} Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.716157 4779 scope.go:117] "RemoveContainer" containerID="08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.716238 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.719551 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8faade2a-9a07-45b9-99e4-b448b64afaaa","Type":"ContainerDied","Data":"621b630c1e35207125fe79ef7c88c8f1fc2945c4bbefe71ee5f3cac1578faa64"} Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.719604 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.725942 4779 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/779f829e-6240-47a5-8d8d-9e279d316df7-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.725968 4779 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/779f829e-6240-47a5-8d8d-9e279d316df7-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.725991 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.726041 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.726091 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.726103 4779 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.726112 4779 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.726123 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.726136 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz796\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-kube-api-access-vz796\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.726171 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/779f829e-6240-47a5-8d8d-9e279d316df7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.728284 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "779f829e-6240-47a5-8d8d-9e279d316df7" (UID: "779f829e-6240-47a5-8d8d-9e279d316df7"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.752057 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.829868 4779 scope.go:117] "RemoveContainer" containerID="02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.835851 4779 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/779f829e-6240-47a5-8d8d-9e279d316df7-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.835885 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.843844 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.861368 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.874271 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 09:50:23 crc kubenswrapper[4779]: E0929 09:50:23.874905 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779f829e-6240-47a5-8d8d-9e279d316df7" containerName="setup-container" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.874933 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="779f829e-6240-47a5-8d8d-9e279d316df7" containerName="setup-container" Sep 29 09:50:23 crc kubenswrapper[4779]: E0929 09:50:23.874962 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerName="setup-container" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.874969 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerName="setup-container" Sep 29 09:50:23 crc kubenswrapper[4779]: E0929 09:50:23.875012 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779f829e-6240-47a5-8d8d-9e279d316df7" containerName="rabbitmq" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.875017 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="779f829e-6240-47a5-8d8d-9e279d316df7" containerName="rabbitmq" Sep 29 09:50:23 crc kubenswrapper[4779]: E0929 09:50:23.875025 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerName="rabbitmq" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.875030 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerName="rabbitmq" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.875320 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8faade2a-9a07-45b9-99e4-b448b64afaaa" containerName="rabbitmq" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.875355 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="779f829e-6240-47a5-8d8d-9e279d316df7" containerName="rabbitmq" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.876827 4779 util.go:30] "No sandbox for pod can be found. 
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.889867 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.910891 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.911010 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.911203 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-qglqq"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.911679 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.911741 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.911844 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.913685 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.933084 4779 scope.go:117] "RemoveContainer" containerID="08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2"
Sep 29 09:50:23 crc kubenswrapper[4779]: E0929 09:50:23.936590 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2\": container with ID starting with 08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2 not found: ID does not exist" containerID="08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.936628 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2"} err="failed to get container status \"08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2\": rpc error: code = NotFound desc = could not find container \"08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2\": container with ID starting with 08bb2d3249f22cfeeeba3ee4988fbc07f5810e42b8ff533515a1e185c10605e2 not found: ID does not exist"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.936650 4779 scope.go:117] "RemoveContainer" containerID="02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff"
Sep 29 09:50:23 crc kubenswrapper[4779]: E0929 09:50:23.937061 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff\": container with ID starting with 02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff not found: ID does not exist" containerID="02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff"
Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.937081 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff"} err="failed to get container status
\"02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff\": rpc error: code = NotFound desc = could not find container \"02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff\": container with ID starting with 02726faa48a5722b4e3e907fbde7d7ab6b4a0c58e2de3b1c06eb717c506392ff not found: ID does not exist" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.937103 4779 scope.go:117] "RemoveContainer" containerID="9b36ddd7a7d61bb24461a2467c281c67c711b0a2f731a9d0c87617f0e20b252b" Sep 29 09:50:23 crc kubenswrapper[4779]: I0929 09:50:23.973895 4779 scope.go:117] "RemoveContainer" containerID="d06451a6c3888fe27bb17e40eff8efe95ff1aed4e982b6c94a8c786dad8579f6" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.057633 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.060447 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.060693 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.060827 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.060982 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.061149 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.061284 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-config-data\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.061374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 
09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.061538 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.061666 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txxlf\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-kube-api-access-txxlf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.061795 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.061976 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0" Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.074789 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.091830 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.093837 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.096557 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.099821 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.100135 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.100268 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.100425 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.100564 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-6fz62"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.100652 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.100682 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171062 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171166 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171199 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171237 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171280 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171335 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171379 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-config-data\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171403 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171412 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171786 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171908 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.171958 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txxlf\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-kube-api-access-txxlf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.172028 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.172262 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.173215 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.173228 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-config-data\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.174926 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.175246 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.175665 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.178211 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.178483 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.191878 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txxlf\" (UniqueName: \"kubernetes.io/projected/9a6cc117-b53c-4f46-bbe7-721a5e656cc4-kube-api-access-txxlf\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.206634 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"9a6cc117-b53c-4f46-bbe7-721a5e656cc4\") " pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.231763 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.273960 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274011 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274046 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274088 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274131 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274154 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274204 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk986\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-kube-api-access-qk986\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274537 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274690 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274765 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.274982 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377070 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377358 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377423 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377484 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377505 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377531 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377542 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377584 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377639 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377665 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377721 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk986\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-kube-api-access-qk986\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377771 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.377836 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.378432 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.378538 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.379163 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.380213 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.386978 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.408730 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.413303 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.422422 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.427776 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk986\" (UniqueName: \"kubernetes.io/projected/31656a9a-a9ca-46d0-b682-29e0ddde8ff7-kube-api-access-qk986\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.474096 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"31656a9a-a9ca-46d0-b682-29e0ddde8ff7\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.716968 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.728735 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="779f829e-6240-47a5-8d8d-9e279d316df7" path="/var/lib/kubelet/pods/779f829e-6240-47a5-8d8d-9e279d316df7/volumes"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.730753 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8faade2a-9a07-45b9-99e4-b448b64afaaa" path="/var/lib/kubelet/pods/8faade2a-9a07-45b9-99e4-b448b64afaaa/volumes"
Sep 29 09:50:24 crc kubenswrapper[4779]: I0929 09:50:24.769979 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 09:50:25 crc kubenswrapper[4779]: I0929 09:50:25.196726 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 09:50:25 crc kubenswrapper[4779]: I0929 09:50:25.744673 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31656a9a-a9ca-46d0-b682-29e0ddde8ff7","Type":"ContainerStarted","Data":"d72026f30b9e46df85773deb10cf49f0a134b3ff1bdcbf96a4ec5d523772b332"}
Sep 29 09:50:25 crc kubenswrapper[4779]: I0929 09:50:25.744970 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31656a9a-a9ca-46d0-b682-29e0ddde8ff7","Type":"ContainerStarted","Data":"4ab7aedb7ecb920034e0b52d290b521cf2dc3ee33fcf75ae44b014acad2eabe7"}
Sep 29 09:50:25 crc kubenswrapper[4779]: I0929 09:50:25.748215 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9a6cc117-b53c-4f46-bbe7-721a5e656cc4","Type":"ContainerStarted","Data":"99a8b9774d4c693c873a0d8fe718fbf8f3c4c91c18ee533dbc6b5646f934410e"}
Sep 29 09:50:25 crc kubenswrapper[4779]: I0929 09:50:25.748274 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9a6cc117-b53c-4f46-bbe7-721a5e656cc4","Type":"ContainerStarted","Data":"34774a5c7f00232d7bba26eb96cd9c88119decfd9db6bec8296967129d0c282f"}
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.618844 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-769db7c785-dd9lp"]
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.621859 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.626041 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.642039 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-769db7c785-dd9lp"]
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.773674 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d78sx\" (UniqueName: \"kubernetes.io/projected/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-kube-api-access-d78sx\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.773726 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-dns-svc\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.773780 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-openstack-edpm-ipam\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.774030 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-nb\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.774160 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-config\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.774199 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-sb\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.876168 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d78sx\" (UniqueName: \"kubernetes.io/projected/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-kube-api-access-d78sx\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.876225 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-dns-svc\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.876315 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-openstack-edpm-ipam\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.876360 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-nb\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.876391 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-config\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.876410 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-sb\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.877505 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-config\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.877530 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-sb\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.877556 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-nb\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.877845 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-dns-svc\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.877963 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-openstack-edpm-ipam\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.895474 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d78sx\" (UniqueName: \"kubernetes.io/projected/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-kube-api-access-d78sx\") pod \"dnsmasq-dns-769db7c785-dd9lp\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") " pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:34 crc kubenswrapper[4779]: I0929 09:50:34.948984 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:35 crc kubenswrapper[4779]: I0929 09:50:35.388486 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-769db7c785-dd9lp"]
Sep 29 09:50:35 crc kubenswrapper[4779]: W0929 09:50:35.401570 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7eb3222b_a4a7_41b2_a46c_0a43603fe3e7.slice/crio-63114b7504e5bb8928427c4d1b5efdbe4ee16d18749c09395adf487508f6e33e WatchSource:0}: Error finding container 63114b7504e5bb8928427c4d1b5efdbe4ee16d18749c09395adf487508f6e33e: Status 404 returned error can't find the container with id 63114b7504e5bb8928427c4d1b5efdbe4ee16d18749c09395adf487508f6e33e
Sep 29 09:50:35 crc kubenswrapper[4779]: I0929 09:50:35.866496 4779 generic.go:334] "Generic (PLEG): container finished" podID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" containerID="a781d91bc32d0761cc88ff22e5bdaf4fefc948be30b0f4813a9c5bacfeba6e51" exitCode=0
Sep 29 09:50:35 crc kubenswrapper[4779]: I0929 09:50:35.866606 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769db7c785-dd9lp" event={"ID":"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7","Type":"ContainerDied","Data":"a781d91bc32d0761cc88ff22e5bdaf4fefc948be30b0f4813a9c5bacfeba6e51"}
Sep 29 09:50:35 crc kubenswrapper[4779]: I0929 09:50:35.866748 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769db7c785-dd9lp" event={"ID":"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7","Type":"ContainerStarted","Data":"63114b7504e5bb8928427c4d1b5efdbe4ee16d18749c09395adf487508f6e33e"}
Sep 29 09:50:36 crc kubenswrapper[4779]: I0929 09:50:36.880224 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769db7c785-dd9lp" event={"ID":"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7","Type":"ContainerStarted","Data":"2127a5e642bba05c2507422aefe585dfee8c5f8076964ddf6772533a520e6ede"}
Sep 29 09:50:36 crc kubenswrapper[4779]: I0929 09:50:36.881781 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:36 crc kubenswrapper[4779]: I0929 09:50:36.906161 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-769db7c785-dd9lp" podStartSLOduration=2.906140015 podStartE2EDuration="2.906140015s" podCreationTimestamp="2025-09-29 09:50:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:50:36.902941783 +0000 UTC m=+1268.884265747" watchObservedRunningTime="2025-09-29 09:50:36.906140015 +0000 UTC m=+1268.887463929"
Sep 29 09:50:44 crc kubenswrapper[4779]: I0929 09:50:44.951482 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.022499 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ddb8fc477-92j54"]
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.022852 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54" podUID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" containerName="dnsmasq-dns" containerID="cri-o://a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89" gracePeriod=10
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.213959 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84f9969b4f-s8nfv"]
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.215770 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.223001 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84f9969b4f-s8nfv"]
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.387199 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-dns-svc\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.387237 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bl64\" (UniqueName: \"kubernetes.io/projected/ab068d40-9967-4674-a654-b0b2dbcfd76f-kube-api-access-7bl64\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.387315 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-openstack-edpm-ipam\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.387628 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-config\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.387683 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-ovsdbserver-sb\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.387742 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-ovsdbserver-nb\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.502536 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-openstack-edpm-ipam\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.502756 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-config\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.502785 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-ovsdbserver-sb\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.502819 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-ovsdbserver-nb\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.502958 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-dns-svc\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.502980 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bl64\" (UniqueName: \"kubernetes.io/projected/ab068d40-9967-4674-a654-b0b2dbcfd76f-kube-api-access-7bl64\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.506024 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-openstack-edpm-ipam\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.507375 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-ovsdbserver-sb\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.507886 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-dns-svc\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.508106 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-config\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.508647 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab068d40-9967-4674-a654-b0b2dbcfd76f-ovsdbserver-nb\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.530096 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bl64\" (UniqueName: \"kubernetes.io/projected/ab068d40-9967-4674-a654-b0b2dbcfd76f-kube-api-access-7bl64\") pod \"dnsmasq-dns-84f9969b4f-s8nfv\" (UID: \"ab068d40-9967-4674-a654-b0b2dbcfd76f\") " pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.537620 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.632299 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.705716 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-sb\") pod \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") "
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.705761 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-nb\") pod \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") "
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.705797 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7dd4\" (UniqueName: \"kubernetes.io/projected/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-kube-api-access-g7dd4\") pod \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") "
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.705858 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-dns-svc\") pod \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") "
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.705887 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-config\") pod \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\" (UID: \"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8\") "
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.715392 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-kube-api-access-g7dd4" (OuterVolumeSpecName: "kube-api-access-g7dd4") pod "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" (UID: "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8"). InnerVolumeSpecName "kube-api-access-g7dd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.774638 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" (UID: "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.781450 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" (UID: "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.791571 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-config" (OuterVolumeSpecName: "config") pod "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" (UID: "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.797324 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" (UID: "aab89cc0-c414-4e7f-8ca9-6a07e6b232e8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.807861 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.808061 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.808133 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7dd4\" (UniqueName: \"kubernetes.io/projected/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-kube-api-access-g7dd4\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.808220 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.808248 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8-config\") on node \"crc\" DevicePath \"\""
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.971425 4779 generic.go:334] "Generic (PLEG): container finished" podID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" containerID="a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89" exitCode=0
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.971473 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54" event={"ID":"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8","Type":"ContainerDied","Data":"a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89"}
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.971504 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54" event={"ID":"aab89cc0-c414-4e7f-8ca9-6a07e6b232e8","Type":"ContainerDied","Data":"d2dcf685b74f5f706a258dbeaf1b8630013ba2db333fbb4407e408ca0fc83563"}
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.971535 4779 scope.go:117] "RemoveContainer" containerID="a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89"
Sep 29 09:50:45 crc kubenswrapper[4779]: I0929 09:50:45.971695 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddb8fc477-92j54"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.014762 4779 scope.go:117] "RemoveContainer" containerID="71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.019340 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ddb8fc477-92j54"]
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.038559 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ddb8fc477-92j54"]
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.055359 4779 scope.go:117] "RemoveContainer" containerID="a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89"
Sep 29 09:50:46 crc kubenswrapper[4779]: E0929 09:50:46.055783 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89\": container with ID starting with a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89 not found: ID does not exist" containerID="a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.055811 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89"} err="failed to get container status \"a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89\": rpc error: code = NotFound desc = could not find container \"a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89\": container with ID starting with a60de987efc29d7265c45ce82cbb435867469181ab0b0df8f3bc9ab960a36e89 not found: ID does not exist"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.055832 4779 scope.go:117] "RemoveContainer" containerID="71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477"
Sep 29 09:50:46 crc kubenswrapper[4779]: E0929 09:50:46.056042 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477\": container with ID starting with 71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477 not found: ID does not exist" containerID="71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.056078 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477"} err="failed to get container status \"71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477\": rpc error: code = NotFound desc = could not find container \"71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477\": container with ID starting with 71be26d0a1a7a34f2f74bd1de50478265755b7e0ecc6904816afd739e064e477 not found: ID does not exist"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.075266 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84f9969b4f-s8nfv"]
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.726248 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" path="/var/lib/kubelet/pods/aab89cc0-c414-4e7f-8ca9-6a07e6b232e8/volumes"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.966877 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.966965 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.967007 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.967779 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3c3dea31b0e7eb572818f728d7b074a8ac3d1e14ba537ebb0fed907a0fa98d28"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.967853 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://3c3dea31b0e7eb572818f728d7b074a8ac3d1e14ba537ebb0fed907a0fa98d28" gracePeriod=600
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.986283 4779 generic.go:334] "Generic (PLEG): container finished" podID="ab068d40-9967-4674-a654-b0b2dbcfd76f" containerID="1c7a5ce8c8cd6379e4ed6a98bb49a8c30b1e69a9dbe7235cfa95b1b65dd57a4b" exitCode=0
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.986323 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv" event={"ID":"ab068d40-9967-4674-a654-b0b2dbcfd76f","Type":"ContainerDied","Data":"1c7a5ce8c8cd6379e4ed6a98bb49a8c30b1e69a9dbe7235cfa95b1b65dd57a4b"}
Sep 29 09:50:46 crc kubenswrapper[4779]: I0929 09:50:46.986366 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv" event={"ID":"ab068d40-9967-4674-a654-b0b2dbcfd76f","Type":"ContainerStarted","Data":"30ad75bf6093dbb581411a21021335abb5b95d24179493b2aaf224eabbcd5be4"}
Sep 29 09:50:47 crc kubenswrapper[4779]: I0929 09:50:47.999010 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="3c3dea31b0e7eb572818f728d7b074a8ac3d1e14ba537ebb0fed907a0fa98d28" exitCode=0
Sep 29 09:50:47 crc kubenswrapper[4779]: I0929 09:50:47.999074 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"3c3dea31b0e7eb572818f728d7b074a8ac3d1e14ba537ebb0fed907a0fa98d28"}
Sep 29 09:50:47 crc kubenswrapper[4779]: I0929 09:50:47.999593 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"9b76d7f97cdc0312b7d80786a3004ed45f01d356ab8c91f3656d0fd71503e713"}
Sep 29 09:50:47 crc kubenswrapper[4779]: I0929 09:50:47.999616 4779 scope.go:117] "RemoveContainer" containerID="fcd55a765537b74d7fe03acdaa880fd723f800adde7aab67d7d2e84cbd82c102"
Sep 29 09:50:48 crc kubenswrapper[4779]: I0929 09:50:48.003995 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv" event={"ID":"ab068d40-9967-4674-a654-b0b2dbcfd76f","Type":"ContainerStarted","Data":"8d46906a3c2144ff08e168d8bde96f3bad5ad8c5ecf7face5e4c66dcbc65d88b"}
Sep 29 09:50:48 crc kubenswrapper[4779]: I0929 09:50:48.004189 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:48 crc kubenswrapper[4779]: I0929 09:50:48.043956 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv" podStartSLOduration=3.043936268 podStartE2EDuration="3.043936268s" podCreationTimestamp="2025-09-29 09:50:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:50:48.03358409 +0000 UTC m=+1280.014908024" watchObservedRunningTime="2025-09-29 09:50:48.043936268 +0000 UTC m=+1280.025260182"
Sep 29 09:50:55 crc kubenswrapper[4779]: I0929 09:50:55.539703 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84f9969b4f-s8nfv"
Sep 29 09:50:55 crc kubenswrapper[4779]: I0929 09:50:55.607355 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-769db7c785-dd9lp"]
Sep 29 09:50:55 crc kubenswrapper[4779]: I0929 09:50:55.607615 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-769db7c785-dd9lp" podUID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" containerName="dnsmasq-dns" containerID="cri-o://2127a5e642bba05c2507422aefe585dfee8c5f8076964ddf6772533a520e6ede" gracePeriod=10
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.110774 4779 generic.go:334] "Generic (PLEG): container finished" podID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" containerID="2127a5e642bba05c2507422aefe585dfee8c5f8076964ddf6772533a520e6ede" exitCode=0
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.110819 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769db7c785-dd9lp" event={"ID":"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7","Type":"ContainerDied","Data":"2127a5e642bba05c2507422aefe585dfee8c5f8076964ddf6772533a520e6ede"}
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.111227 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769db7c785-dd9lp" event={"ID":"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7","Type":"ContainerDied","Data":"63114b7504e5bb8928427c4d1b5efdbe4ee16d18749c09395adf487508f6e33e"}
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.111247 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63114b7504e5bb8928427c4d1b5efdbe4ee16d18749c09395adf487508f6e33e"
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.112645 4779 generic.go:334] "Generic (PLEG): container finished" podID="9a6cc117-b53c-4f46-bbe7-721a5e656cc4" containerID="99a8b9774d4c693c873a0d8fe718fbf8f3c4c91c18ee533dbc6b5646f934410e" exitCode=0
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.112682 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9a6cc117-b53c-4f46-bbe7-721a5e656cc4","Type":"ContainerDied","Data":"99a8b9774d4c693c873a0d8fe718fbf8f3c4c91c18ee533dbc6b5646f934410e"}
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.349363 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-769db7c785-dd9lp"
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.522648 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-nb\") pod \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") "
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.522725 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-sb\") pod \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") "
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.522796 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-openstack-edpm-ipam\") pod \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") "
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.522818 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-dns-svc\") pod \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") "
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.522856 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-config\") pod \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") "
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.522926 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d78sx\" (UniqueName: \"kubernetes.io/projected/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-kube-api-access-d78sx\") pod \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\" (UID: \"7eb3222b-a4a7-41b2-a46c-0a43603fe3e7\") "
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.532187 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-kube-api-access-d78sx" (OuterVolumeSpecName: "kube-api-access-d78sx") pod "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" (UID: "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7"). InnerVolumeSpecName "kube-api-access-d78sx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.573130 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" (UID: "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.585303 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" (UID: "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.592565 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-config" (OuterVolumeSpecName: "config") pod "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" (UID: "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.593582 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" (UID: "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.597604 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" (UID: "7eb3222b-a4a7-41b2-a46c-0a43603fe3e7"). InnerVolumeSpecName "ovsdbserver-nb".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.625325 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.625411 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.625430 4779 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.625441 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-config\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.625450 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d78sx\" (UniqueName: \"kubernetes.io/projected/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-kube-api-access-d78sx\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:56 crc kubenswrapper[4779]: I0929 09:50:56.625462 4779 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 09:50:57 crc kubenswrapper[4779]: I0929 09:50:57.124394 4779 generic.go:334] "Generic (PLEG): container finished" podID="31656a9a-a9ca-46d0-b682-29e0ddde8ff7" containerID="d72026f30b9e46df85773deb10cf49f0a134b3ff1bdcbf96a4ec5d523772b332" exitCode=0 Sep 29 09:50:57 crc kubenswrapper[4779]: I0929 09:50:57.124473 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31656a9a-a9ca-46d0-b682-29e0ddde8ff7","Type":"ContainerDied","Data":"d72026f30b9e46df85773deb10cf49f0a134b3ff1bdcbf96a4ec5d523772b332"} Sep 29 09:50:57 crc kubenswrapper[4779]: I0929 09:50:57.129021 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-769db7c785-dd9lp" Sep 29 09:50:57 crc kubenswrapper[4779]: I0929 09:50:57.129010 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9a6cc117-b53c-4f46-bbe7-721a5e656cc4","Type":"ContainerStarted","Data":"7b1306b3e5ef270d0052346fc26d5e6d743b79ccddb490ddf8ff3f4b00546f0a"} Sep 29 09:50:57 crc kubenswrapper[4779]: I0929 09:50:57.129455 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 09:50:57 crc kubenswrapper[4779]: I0929 09:50:57.200017 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=34.199997178 podStartE2EDuration="34.199997178s" podCreationTimestamp="2025-09-29 09:50:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:50:57.190536886 +0000 UTC m=+1289.171860800" watchObservedRunningTime="2025-09-29 09:50:57.199997178 +0000 UTC m=+1289.181321082" Sep 29 09:50:57 crc kubenswrapper[4779]: I0929 09:50:57.220503 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-769db7c785-dd9lp"] Sep 29 09:50:57 crc kubenswrapper[4779]: I0929 09:50:57.244716 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-769db7c785-dd9lp"] Sep 29 09:50:58 crc kubenswrapper[4779]: I0929 09:50:58.139396 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"31656a9a-a9ca-46d0-b682-29e0ddde8ff7","Type":"ContainerStarted","Data":"f33cd143ae9170ec2c9c2b3bcee5ca76b917c226acd9446ab5b55aaaf31a68eb"} Sep 29 09:50:58 crc kubenswrapper[4779]: I0929 09:50:58.140247 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:50:58 crc kubenswrapper[4779]: I0929 09:50:58.161478 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.16146367 podStartE2EDuration="34.16146367s" podCreationTimestamp="2025-09-29 09:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 09:50:58.160833862 +0000 UTC m=+1290.142157766" watchObservedRunningTime="2025-09-29 09:50:58.16146367 +0000 UTC m=+1290.142787574" Sep 29 09:50:58 crc kubenswrapper[4779]: I0929 09:50:58.726628 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" path="/var/lib/kubelet/pods/7eb3222b-a4a7-41b2-a46c-0a43603fe3e7/volumes" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.475390 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f"] Sep 29 09:51:01 crc kubenswrapper[4779]: E0929 09:51:01.476078 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" containerName="dnsmasq-dns" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.476095 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" containerName="dnsmasq-dns" Sep 29 09:51:01 crc kubenswrapper[4779]: E0929 09:51:01.476111 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" containerName="dnsmasq-dns" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 
09:51:01.476118 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" containerName="dnsmasq-dns" Sep 29 09:51:01 crc kubenswrapper[4779]: E0929 09:51:01.476140 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" containerName="init" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.476148 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" containerName="init" Sep 29 09:51:01 crc kubenswrapper[4779]: E0929 09:51:01.476163 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" containerName="init" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.476170 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" containerName="init" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.476410 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eb3222b-a4a7-41b2-a46c-0a43603fe3e7" containerName="dnsmasq-dns" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.476441 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="aab89cc0-c414-4e7f-8ca9-6a07e6b232e8" containerName="dnsmasq-dns" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.482771 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.485975 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.486089 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.486206 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.486283 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.496413 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f"] Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.617684 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t256r\" (UniqueName: \"kubernetes.io/projected/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-kube-api-access-t256r\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.618097 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.618252 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.618340 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.719746 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.719806 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.719962 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t256r\" (UniqueName: \"kubernetes.io/projected/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-kube-api-access-t256r\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.720031 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.727050 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.727557 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.728366 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.738721 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t256r\" (UniqueName: \"kubernetes.io/projected/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-kube-api-access-t256r\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:01 crc kubenswrapper[4779]: I0929 09:51:01.809310 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:02 crc kubenswrapper[4779]: I0929 09:51:02.344747 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f"] Sep 29 09:51:02 crc kubenswrapper[4779]: W0929 09:51:02.349389 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc99c3706_4911_4e0f_b822_23d0eeb2d2d9.slice/crio-f9c4a56d69f5e0f56b936d1c7eca46e20fcbe76720b3fb1209ff549955412863 WatchSource:0}: Error finding container f9c4a56d69f5e0f56b936d1c7eca46e20fcbe76720b3fb1209ff549955412863: Status 404 returned error can't find the container with id f9c4a56d69f5e0f56b936d1c7eca46e20fcbe76720b3fb1209ff549955412863 Sep 29 09:51:03 crc kubenswrapper[4779]: I0929 09:51:03.180357 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" event={"ID":"c99c3706-4911-4e0f-b822-23d0eeb2d2d9","Type":"ContainerStarted","Data":"f9c4a56d69f5e0f56b936d1c7eca46e20fcbe76720b3fb1209ff549955412863"} Sep 29 09:51:11 crc kubenswrapper[4779]: I0929 09:51:11.253017 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" event={"ID":"c99c3706-4911-4e0f-b822-23d0eeb2d2d9","Type":"ContainerStarted","Data":"10d68b6fe4fd0865cbfabae4ae4692eddc6d448837c492b3c525bcfe291b9620"} Sep 29 09:51:11 crc kubenswrapper[4779]: I0929 09:51:11.275180 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" podStartSLOduration=1.715092051 podStartE2EDuration="10.275159477s" podCreationTimestamp="2025-09-29 09:51:01 +0000 UTC" firstStartedPulling="2025-09-29 09:51:02.351617033 +0000 UTC m=+1294.332940937" lastFinishedPulling="2025-09-29 09:51:10.911684429 +0000 UTC m=+1302.893008363" observedRunningTime="2025-09-29 09:51:11.266406896 +0000 UTC m=+1303.247730800" watchObservedRunningTime="2025-09-29 09:51:11.275159477 +0000 UTC m=+1303.256483411" Sep 29 09:51:14 crc kubenswrapper[4779]: I0929 09:51:14.235237 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 09:51:14 crc kubenswrapper[4779]: I0929 09:51:14.725075 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 09:51:29 crc kubenswrapper[4779]: I0929 09:51:29.454732 4779 generic.go:334] "Generic (PLEG): container finished" podID="c99c3706-4911-4e0f-b822-23d0eeb2d2d9" containerID="10d68b6fe4fd0865cbfabae4ae4692eddc6d448837c492b3c525bcfe291b9620" 
exitCode=0 Sep 29 09:51:29 crc kubenswrapper[4779]: I0929 09:51:29.454962 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" event={"ID":"c99c3706-4911-4e0f-b822-23d0eeb2d2d9","Type":"ContainerDied","Data":"10d68b6fe4fd0865cbfabae4ae4692eddc6d448837c492b3c525bcfe291b9620"} Sep 29 09:51:30 crc kubenswrapper[4779]: I0929 09:51:30.864804 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.043826 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-ssh-key\") pod \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.043888 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-inventory\") pod \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.044017 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t256r\" (UniqueName: \"kubernetes.io/projected/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-kube-api-access-t256r\") pod \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.044160 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-repo-setup-combined-ca-bundle\") pod \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\" (UID: \"c99c3706-4911-4e0f-b822-23d0eeb2d2d9\") " Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.049441 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-kube-api-access-t256r" (OuterVolumeSpecName: "kube-api-access-t256r") pod "c99c3706-4911-4e0f-b822-23d0eeb2d2d9" (UID: "c99c3706-4911-4e0f-b822-23d0eeb2d2d9"). InnerVolumeSpecName "kube-api-access-t256r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.051153 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "c99c3706-4911-4e0f-b822-23d0eeb2d2d9" (UID: "c99c3706-4911-4e0f-b822-23d0eeb2d2d9"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.074482 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-inventory" (OuterVolumeSpecName: "inventory") pod "c99c3706-4911-4e0f-b822-23d0eeb2d2d9" (UID: "c99c3706-4911-4e0f-b822-23d0eeb2d2d9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.099356 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c99c3706-4911-4e0f-b822-23d0eeb2d2d9" (UID: "c99c3706-4911-4e0f-b822-23d0eeb2d2d9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.146617 4779 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.146680 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.146695 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.146709 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t256r\" (UniqueName: \"kubernetes.io/projected/c99c3706-4911-4e0f-b822-23d0eeb2d2d9-kube-api-access-t256r\") on node \"crc\" DevicePath \"\"" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.481720 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" event={"ID":"c99c3706-4911-4e0f-b822-23d0eeb2d2d9","Type":"ContainerDied","Data":"f9c4a56d69f5e0f56b936d1c7eca46e20fcbe76720b3fb1209ff549955412863"} Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.481774 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.482807 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9c4a56d69f5e0f56b936d1c7eca46e20fcbe76720b3fb1209ff549955412863" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.553123 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq"] Sep 29 09:51:31 crc kubenswrapper[4779]: E0929 09:51:31.553720 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c99c3706-4911-4e0f-b822-23d0eeb2d2d9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.553743 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c99c3706-4911-4e0f-b822-23d0eeb2d2d9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.554198 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c99c3706-4911-4e0f-b822-23d0eeb2d2d9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.555276 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.557502 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.557535 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.558605 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.558841 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.570959 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq"] Sep 29 09:51:31 crc kubenswrapper[4779]: E0929 09:51:31.633379 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc99c3706_4911_4e0f_b822_23d0eeb2d2d9.slice/crio-f9c4a56d69f5e0f56b936d1c7eca46e20fcbe76720b3fb1209ff549955412863\": RecentStats: unable to find data in memory cache]" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.658865 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdlvg\" (UniqueName: \"kubernetes.io/projected/14b18262-5ab6-43d1-8477-04f85881e4d0-kube-api-access-kdlvg\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.658952 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.658991 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.659039 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.761245 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: 
\"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.761328 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.761403 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.761526 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdlvg\" (UniqueName: \"kubernetes.io/projected/14b18262-5ab6-43d1-8477-04f85881e4d0-kube-api-access-kdlvg\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.766628 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.766755 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.770703 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.778967 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdlvg\" (UniqueName: \"kubernetes.io/projected/14b18262-5ab6-43d1-8477-04f85881e4d0-kube-api-access-kdlvg\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:31 crc kubenswrapper[4779]: I0929 09:51:31.878982 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" Sep 29 09:51:32 crc kubenswrapper[4779]: I0929 09:51:32.939235 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq"] Sep 29 09:51:32 crc kubenswrapper[4779]: W0929 09:51:32.942198 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14b18262_5ab6_43d1_8477_04f85881e4d0.slice/crio-c070a4a1a84ff8093ddfe996d234071357b038376ee269c340ded8fefa2df55d WatchSource:0}: Error finding container c070a4a1a84ff8093ddfe996d234071357b038376ee269c340ded8fefa2df55d: Status 404 returned error can't find the container with id c070a4a1a84ff8093ddfe996d234071357b038376ee269c340ded8fefa2df55d Sep 29 09:51:33 crc kubenswrapper[4779]: I0929 09:51:33.506929 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" event={"ID":"14b18262-5ab6-43d1-8477-04f85881e4d0","Type":"ContainerStarted","Data":"c070a4a1a84ff8093ddfe996d234071357b038376ee269c340ded8fefa2df55d"} Sep 29 09:51:34 crc kubenswrapper[4779]: I0929 09:51:34.515209 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" event={"ID":"14b18262-5ab6-43d1-8477-04f85881e4d0","Type":"ContainerStarted","Data":"c7d76ce67495d2b1394c7e8c57f79933637ffa4cf59bd795f5711b1d4023617a"} Sep 29 09:51:34 crc kubenswrapper[4779]: I0929 09:51:34.541657 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" podStartSLOduration=3.034692848 podStartE2EDuration="3.541636552s" podCreationTimestamp="2025-09-29 09:51:31 +0000 UTC" firstStartedPulling="2025-09-29 09:51:32.946147413 +0000 UTC m=+1324.927471337" lastFinishedPulling="2025-09-29 09:51:33.453091127 +0000 UTC m=+1325.434415041" observedRunningTime="2025-09-29 09:51:34.537849593 +0000 UTC m=+1326.519173547" watchObservedRunningTime="2025-09-29 09:51:34.541636552 +0000 UTC m=+1326.522960456" Sep 29 09:52:37 crc kubenswrapper[4779]: I0929 09:52:37.270062 4779 scope.go:117] "RemoveContainer" containerID="c75f48b46418075f1c9f40d4f2db87fc6d4fdedefab4e3c3ac3031e7acdf461b" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.355582 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8lkhw"] Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.358519 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.368743 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8lkhw"] Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.456811 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzl72\" (UniqueName: \"kubernetes.io/projected/77fcb77e-dd74-44f7-9f93-4b22359bfb63-kube-api-access-jzl72\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.456872 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-utilities\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.456912 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-catalog-content\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.558778 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzl72\" (UniqueName: \"kubernetes.io/projected/77fcb77e-dd74-44f7-9f93-4b22359bfb63-kube-api-access-jzl72\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.558836 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-utilities\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.558862 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-catalog-content\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.559331 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-utilities\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.559375 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-catalog-content\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.578045 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jzl72\" (UniqueName: \"kubernetes.io/projected/77fcb77e-dd74-44f7-9f93-4b22359bfb63-kube-api-access-jzl72\") pod \"community-operators-8lkhw\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:57 crc kubenswrapper[4779]: I0929 09:52:57.727781 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:52:58 crc kubenswrapper[4779]: I0929 09:52:58.251823 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8lkhw"] Sep 29 09:52:58 crc kubenswrapper[4779]: I0929 09:52:58.414682 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8lkhw" event={"ID":"77fcb77e-dd74-44f7-9f93-4b22359bfb63","Type":"ContainerStarted","Data":"abf7d0adf67084af5b7effd86a90b45f95b9a7a145ea8657bb6da2f28208ade8"} Sep 29 09:52:59 crc kubenswrapper[4779]: I0929 09:52:59.425552 4779 generic.go:334] "Generic (PLEG): container finished" podID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerID="27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c" exitCode=0 Sep 29 09:52:59 crc kubenswrapper[4779]: I0929 09:52:59.425609 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8lkhw" event={"ID":"77fcb77e-dd74-44f7-9f93-4b22359bfb63","Type":"ContainerDied","Data":"27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c"} Sep 29 09:53:00 crc kubenswrapper[4779]: I0929 09:53:00.437716 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8lkhw" event={"ID":"77fcb77e-dd74-44f7-9f93-4b22359bfb63","Type":"ContainerStarted","Data":"89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23"} Sep 29 09:53:01 crc kubenswrapper[4779]: I0929 09:53:01.450855 4779 generic.go:334] "Generic (PLEG): container finished" podID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerID="89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23" exitCode=0 Sep 29 09:53:01 crc kubenswrapper[4779]: I0929 09:53:01.451003 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8lkhw" event={"ID":"77fcb77e-dd74-44f7-9f93-4b22359bfb63","Type":"ContainerDied","Data":"89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23"} Sep 29 09:53:02 crc kubenswrapper[4779]: I0929 09:53:02.462629 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8lkhw" event={"ID":"77fcb77e-dd74-44f7-9f93-4b22359bfb63","Type":"ContainerStarted","Data":"1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904"} Sep 29 09:53:02 crc kubenswrapper[4779]: I0929 09:53:02.486550 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8lkhw" podStartSLOduration=3.047567717 podStartE2EDuration="5.486535484s" podCreationTimestamp="2025-09-29 09:52:57 +0000 UTC" firstStartedPulling="2025-09-29 09:52:59.428123342 +0000 UTC m=+1411.409447246" lastFinishedPulling="2025-09-29 09:53:01.867091109 +0000 UTC m=+1413.848415013" observedRunningTime="2025-09-29 09:53:02.479292743 +0000 UTC m=+1414.460616667" watchObservedRunningTime="2025-09-29 09:53:02.486535484 +0000 UTC m=+1414.467859378" Sep 29 09:53:07 crc kubenswrapper[4779]: I0929 09:53:07.728073 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:53:07 crc kubenswrapper[4779]: I0929 09:53:07.728603 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:53:07 crc kubenswrapper[4779]: I0929 09:53:07.781044 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:53:08 crc kubenswrapper[4779]: I0929 09:53:08.620219 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:53:08 crc kubenswrapper[4779]: I0929 09:53:08.668550 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8lkhw"] Sep 29 09:53:10 crc kubenswrapper[4779]: I0929 09:53:10.588305 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8lkhw" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerName="registry-server" containerID="cri-o://1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904" gracePeriod=2 Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.060996 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8lkhw" Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.133130 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-catalog-content\") pod \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.133202 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-utilities\") pod \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.133335 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzl72\" (UniqueName: \"kubernetes.io/projected/77fcb77e-dd74-44f7-9f93-4b22359bfb63-kube-api-access-jzl72\") pod \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\" (UID: \"77fcb77e-dd74-44f7-9f93-4b22359bfb63\") " Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.134281 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-utilities" (OuterVolumeSpecName: "utilities") pod "77fcb77e-dd74-44f7-9f93-4b22359bfb63" (UID: "77fcb77e-dd74-44f7-9f93-4b22359bfb63"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.141646 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77fcb77e-dd74-44f7-9f93-4b22359bfb63-kube-api-access-jzl72" (OuterVolumeSpecName: "kube-api-access-jzl72") pod "77fcb77e-dd74-44f7-9f93-4b22359bfb63" (UID: "77fcb77e-dd74-44f7-9f93-4b22359bfb63"). InnerVolumeSpecName "kube-api-access-jzl72". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.203366 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77fcb77e-dd74-44f7-9f93-4b22359bfb63" (UID: "77fcb77e-dd74-44f7-9f93-4b22359bfb63"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.235272 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.235314 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77fcb77e-dd74-44f7-9f93-4b22359bfb63-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.235329 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzl72\" (UniqueName: \"kubernetes.io/projected/77fcb77e-dd74-44f7-9f93-4b22359bfb63-kube-api-access-jzl72\") on node \"crc\" DevicePath \"\"" Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.604359 4779 generic.go:334] "Generic (PLEG): container finished" podID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerID="1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904" exitCode=0 Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.604407 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8lkhw" event={"ID":"77fcb77e-dd74-44f7-9f93-4b22359bfb63","Type":"ContainerDied","Data":"1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904"} Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.604429 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8lkhw"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.604456 4779 scope.go:117] "RemoveContainer" containerID="1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.604443 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8lkhw" event={"ID":"77fcb77e-dd74-44f7-9f93-4b22359bfb63","Type":"ContainerDied","Data":"abf7d0adf67084af5b7effd86a90b45f95b9a7a145ea8657bb6da2f28208ade8"}
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.645453 4779 scope.go:117] "RemoveContainer" containerID="89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.648777 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8lkhw"]
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.660610 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8lkhw"]
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.683074 4779 scope.go:117] "RemoveContainer" containerID="27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.725587 4779 scope.go:117] "RemoveContainer" containerID="1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904"
Sep 29 09:53:11 crc kubenswrapper[4779]: E0929 09:53:11.726142 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904\": container with ID starting with 1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904 not found: ID does not exist" containerID="1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.726213 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904"} err="failed to get container status \"1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904\": rpc error: code = NotFound desc = could not find container \"1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904\": container with ID starting with 1bb196abcdd5e0e70c5fe66b394839b730383598b154630caba872ff968c1904 not found: ID does not exist"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.726331 4779 scope.go:117] "RemoveContainer" containerID="89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23"
Sep 29 09:53:11 crc kubenswrapper[4779]: E0929 09:53:11.727044 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23\": container with ID starting with 89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23 not found: ID does not exist" containerID="89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.727106 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23"} err="failed to get container status \"89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23\": rpc error: code = NotFound desc = could not find container \"89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23\": container with ID starting with 89d640d4b6fa1428238dff31c1bea02596f57d0c2544825eded679aef6726f23 not found: ID does not exist"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.727154 4779 scope.go:117] "RemoveContainer" containerID="27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c"
Sep 29 09:53:11 crc kubenswrapper[4779]: E0929 09:53:11.727562 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c\": container with ID starting with 27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c not found: ID does not exist" containerID="27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c"
Sep 29 09:53:11 crc kubenswrapper[4779]: I0929 09:53:11.727603 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c"} err="failed to get container status \"27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c\": rpc error: code = NotFound desc = could not find container \"27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c\": container with ID starting with 27cb0f2eaac3b66ca3d4f741e55075eb016423fae696714acd1c9bbe3006a00c not found: ID does not exist"
Sep 29 09:53:12 crc kubenswrapper[4779]: I0929 09:53:12.750336 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" path="/var/lib/kubelet/pods/77fcb77e-dd74-44f7-9f93-4b22359bfb63/volumes"
Sep 29 09:53:16 crc kubenswrapper[4779]: I0929 09:53:16.966972 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:53:16 crc kubenswrapper[4779]: I0929 09:53:16.967286 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.283523 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cz9q5"]
Sep 29 09:53:35 crc kubenswrapper[4779]: E0929 09:53:35.284391 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerName="registry-server"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.284408 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerName="registry-server"
Sep 29 09:53:35 crc kubenswrapper[4779]: E0929 09:53:35.284437 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerName="extract-utilities"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.284444 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerName="extract-utilities"
Sep 29 09:53:35 crc kubenswrapper[4779]: E0929 09:53:35.284456 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerName="extract-content"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.284463 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerName="extract-content"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.284711 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="77fcb77e-dd74-44f7-9f93-4b22359bfb63" containerName="registry-server"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.286096 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.302046 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cz9q5"]
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.332449 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-catalog-content\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.333098 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-utilities\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.333358 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9xng\" (UniqueName: \"kubernetes.io/projected/47448e3a-9037-4dd5-bee6-7f6f7118258b-kube-api-access-k9xng\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.435172 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9xng\" (UniqueName: \"kubernetes.io/projected/47448e3a-9037-4dd5-bee6-7f6f7118258b-kube-api-access-k9xng\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.435317 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-catalog-content\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.435400 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-utilities\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.435982 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-catalog-content\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.435996 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-utilities\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.456202 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9xng\" (UniqueName: \"kubernetes.io/projected/47448e3a-9037-4dd5-bee6-7f6f7118258b-kube-api-access-k9xng\") pod \"redhat-operators-cz9q5\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") " pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:35 crc kubenswrapper[4779]: I0929 09:53:35.609127 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:36 crc kubenswrapper[4779]: I0929 09:53:36.059602 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cz9q5"]
Sep 29 09:53:36 crc kubenswrapper[4779]: I0929 09:53:36.855853 4779 generic.go:334] "Generic (PLEG): container finished" podID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerID="f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca" exitCode=0
Sep 29 09:53:36 crc kubenswrapper[4779]: I0929 09:53:36.856138 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz9q5" event={"ID":"47448e3a-9037-4dd5-bee6-7f6f7118258b","Type":"ContainerDied","Data":"f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca"}
Sep 29 09:53:36 crc kubenswrapper[4779]: I0929 09:53:36.856188 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz9q5" event={"ID":"47448e3a-9037-4dd5-bee6-7f6f7118258b","Type":"ContainerStarted","Data":"76611704df026a71a648a42b255c23b16ca405566f6a181a569a4d2b27380ee2"}
Sep 29 09:53:37 crc kubenswrapper[4779]: I0929 09:53:37.353570 4779 scope.go:117] "RemoveContainer" containerID="9e5b56d40142339b2b19586d2b73e6bc5234bec86797cccf6459ec28f327845b"
Sep 29 09:53:37 crc kubenswrapper[4779]: I0929 09:53:37.377594 4779 scope.go:117] "RemoveContainer" containerID="3386d853ea16d45ed25d2463b56a6269729e3608892fc940164ef824e377547b"
Sep 29 09:53:37 crc kubenswrapper[4779]: I0929 09:53:37.463829 4779 scope.go:117] "RemoveContainer" containerID="797dbaa177f801ec5b0bc4880bb48c6bc51054477e09eed659029962b71a0084"
Sep 29 09:53:37 crc kubenswrapper[4779]: I0929 09:53:37.496319 4779 scope.go:117] "RemoveContainer" containerID="219a0bdf148545207fc39825edbb62537b18e3fbab5e6999e4931f8167f4f9e7"
Sep 29 09:53:37 crc kubenswrapper[4779]: I0929 09:53:37.520444 4779 scope.go:117] "RemoveContainer" containerID="6d416c201fc6350bc36a311c5718e7aa7a01c858bafd3e6cacaf6b4ab23a663a"
Sep 29 09:53:38 crc kubenswrapper[4779]: I0929 09:53:38.885916 4779 generic.go:334] "Generic (PLEG): container finished" podID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerID="bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54" exitCode=0
Sep 29 09:53:38 crc kubenswrapper[4779]: I0929 09:53:38.886000 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz9q5" event={"ID":"47448e3a-9037-4dd5-bee6-7f6f7118258b","Type":"ContainerDied","Data":"bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54"}
Sep 29 09:53:40 crc kubenswrapper[4779]: I0929 09:53:40.904161 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz9q5" event={"ID":"47448e3a-9037-4dd5-bee6-7f6f7118258b","Type":"ContainerStarted","Data":"3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990"}
Sep 29 09:53:40 crc kubenswrapper[4779]: I0929 09:53:40.940350 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cz9q5" podStartSLOduration=2.650787507 podStartE2EDuration="5.940328597s" podCreationTimestamp="2025-09-29 09:53:35 +0000 UTC" firstStartedPulling="2025-09-29 09:53:36.859148086 +0000 UTC m=+1448.840471990" lastFinishedPulling="2025-09-29 09:53:40.148689176 +0000 UTC m=+1452.130013080" observedRunningTime="2025-09-29 09:53:40.929050399 +0000 UTC m=+1452.910374303" watchObservedRunningTime="2025-09-29 09:53:40.940328597 +0000 UTC m=+1452.921652511"
Sep 29 09:53:45 crc kubenswrapper[4779]: I0929 09:53:45.610053 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:45 crc kubenswrapper[4779]: I0929 09:53:45.610562 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:45 crc kubenswrapper[4779]: I0929 09:53:45.656384 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:45 crc kubenswrapper[4779]: I0929 09:53:45.992965 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:46 crc kubenswrapper[4779]: I0929 09:53:46.034966 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cz9q5"]
Sep 29 09:53:46 crc kubenswrapper[4779]: I0929 09:53:46.966563 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:53:46 crc kubenswrapper[4779]: I0929 09:53:46.966630 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:53:47 crc kubenswrapper[4779]: I0929 09:53:47.966780 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cz9q5" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerName="registry-server" containerID="cri-o://3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990" gracePeriod=2
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.465406 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.575627 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-utilities\") pod \"47448e3a-9037-4dd5-bee6-7f6f7118258b\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") "
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.575791 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-catalog-content\") pod \"47448e3a-9037-4dd5-bee6-7f6f7118258b\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") "
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.575954 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9xng\" (UniqueName: \"kubernetes.io/projected/47448e3a-9037-4dd5-bee6-7f6f7118258b-kube-api-access-k9xng\") pod \"47448e3a-9037-4dd5-bee6-7f6f7118258b\" (UID: \"47448e3a-9037-4dd5-bee6-7f6f7118258b\") "
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.576494 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-utilities" (OuterVolumeSpecName: "utilities") pod "47448e3a-9037-4dd5-bee6-7f6f7118258b" (UID: "47448e3a-9037-4dd5-bee6-7f6f7118258b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.581582 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47448e3a-9037-4dd5-bee6-7f6f7118258b-kube-api-access-k9xng" (OuterVolumeSpecName: "kube-api-access-k9xng") pod "47448e3a-9037-4dd5-bee6-7f6f7118258b" (UID: "47448e3a-9037-4dd5-bee6-7f6f7118258b"). InnerVolumeSpecName "kube-api-access-k9xng". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.654343 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47448e3a-9037-4dd5-bee6-7f6f7118258b" (UID: "47448e3a-9037-4dd5-bee6-7f6f7118258b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.678536 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.679021 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9xng\" (UniqueName: \"kubernetes.io/projected/47448e3a-9037-4dd5-bee6-7f6f7118258b-kube-api-access-k9xng\") on node \"crc\" DevicePath \"\""
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.679045 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47448e3a-9037-4dd5-bee6-7f6f7118258b-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.977163 4779 generic.go:334] "Generic (PLEG): container finished" podID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerID="3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990" exitCode=0
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.977203 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz9q5" event={"ID":"47448e3a-9037-4dd5-bee6-7f6f7118258b","Type":"ContainerDied","Data":"3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990"}
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.977227 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cz9q5" event={"ID":"47448e3a-9037-4dd5-bee6-7f6f7118258b","Type":"ContainerDied","Data":"76611704df026a71a648a42b255c23b16ca405566f6a181a569a4d2b27380ee2"}
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.977243 4779 scope.go:117] "RemoveContainer" containerID="3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990"
Sep 29 09:53:48 crc kubenswrapper[4779]: I0929 09:53:48.977344 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cz9q5"
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.005561 4779 scope.go:117] "RemoveContainer" containerID="bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54"
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.006389 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cz9q5"]
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.015153 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cz9q5"]
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.027164 4779 scope.go:117] "RemoveContainer" containerID="f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca"
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.071988 4779 scope.go:117] "RemoveContainer" containerID="3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990"
Sep 29 09:53:49 crc kubenswrapper[4779]: E0929 09:53:49.072372 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990\": container with ID starting with 3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990 not found: ID does not exist" containerID="3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990"
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.072403 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990"} err="failed to get container status \"3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990\": rpc error: code = NotFound desc = could not find container \"3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990\": container with ID starting with 3959571dc1f46acce3059afd45a324c4543d93afd02b203946f8cf8a21626990 not found: ID does not exist"
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.072420 4779 scope.go:117] "RemoveContainer" containerID="bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54"
Sep 29 09:53:49 crc kubenswrapper[4779]: E0929 09:53:49.073144 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54\": container with ID starting with bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54 not found: ID does not exist" containerID="bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54"
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.073170 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54"} err="failed to get container status \"bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54\": rpc error: code = NotFound desc = could not find container \"bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54\": container with ID starting with bd74c195dce0ff2598426278eb9113fb1a369ba10f5c8433acb38c92af124b54 not found: ID does not exist"
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.073184 4779 scope.go:117] "RemoveContainer" containerID="f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca"
Sep 29 09:53:49 crc kubenswrapper[4779]: E0929 09:53:49.073443 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca\": container with ID starting with f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca not found: ID does not exist" containerID="f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca"
Sep 29 09:53:49 crc kubenswrapper[4779]: I0929 09:53:49.073463 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca"} err="failed to get container status \"f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca\": rpc error: code = NotFound desc = could not find container \"f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca\": container with ID starting with f3e5d6b970563ebf589684d32c3000c6d9a1db15d74623542c4752c1a25fa2ca not found: ID does not exist"
Sep 29 09:53:50 crc kubenswrapper[4779]: I0929 09:53:50.723890 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" path="/var/lib/kubelet/pods/47448e3a-9037-4dd5-bee6-7f6f7118258b/volumes"
Sep 29 09:54:16 crc kubenswrapper[4779]: I0929 09:54:16.966374 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:54:16 crc kubenswrapper[4779]: I0929 09:54:16.967099 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:54:16 crc kubenswrapper[4779]: I0929 09:54:16.967165 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv"
Sep 29 09:54:16 crc kubenswrapper[4779]: I0929 09:54:16.968231 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9b76d7f97cdc0312b7d80786a3004ed45f01d356ab8c91f3656d0fd71503e713"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 09:54:16 crc kubenswrapper[4779]: I0929 09:54:16.968342 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://9b76d7f97cdc0312b7d80786a3004ed45f01d356ab8c91f3656d0fd71503e713" gracePeriod=600
Sep 29 09:54:17 crc kubenswrapper[4779]: I0929 09:54:17.276229 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="9b76d7f97cdc0312b7d80786a3004ed45f01d356ab8c91f3656d0fd71503e713" exitCode=0
Sep 29 09:54:17 crc kubenswrapper[4779]: I0929 09:54:17.276299 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"9b76d7f97cdc0312b7d80786a3004ed45f01d356ab8c91f3656d0fd71503e713"}
Sep 29 09:54:17 crc kubenswrapper[4779]: I0929 09:54:17.276662 4779 scope.go:117] "RemoveContainer" containerID="3c3dea31b0e7eb572818f728d7b074a8ac3d1e14ba537ebb0fed907a0fa98d28"
Sep 29 09:54:18 crc kubenswrapper[4779]: I0929 09:54:18.292332 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"}
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.198106 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9qk6k"]
Sep 29 09:54:26 crc kubenswrapper[4779]: E0929 09:54:26.198986 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerName="extract-utilities"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.198999 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerName="extract-utilities"
Sep 29 09:54:26 crc kubenswrapper[4779]: E0929 09:54:26.199013 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerName="extract-content"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.199029 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerName="extract-content"
Sep 29 09:54:26 crc kubenswrapper[4779]: E0929 09:54:26.199050 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerName="registry-server"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.199056 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerName="registry-server"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.199229 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="47448e3a-9037-4dd5-bee6-7f6f7118258b" containerName="registry-server"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.201081 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.224836 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9qk6k"]
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.396741 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2swn9\" (UniqueName: \"kubernetes.io/projected/6798f980-2fb8-4e39-8dbc-362bb775b256-kube-api-access-2swn9\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.396854 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-catalog-content\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.396997 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-utilities\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.498431 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-utilities\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.498567 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2swn9\" (UniqueName: \"kubernetes.io/projected/6798f980-2fb8-4e39-8dbc-362bb775b256-kube-api-access-2swn9\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.498981 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-utilities\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.499050 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-catalog-content\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.499480 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-catalog-content\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.521815 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2swn9\" (UniqueName: \"kubernetes.io/projected/6798f980-2fb8-4e39-8dbc-362bb775b256-kube-api-access-2swn9\") pod \"certified-operators-9qk6k\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") " pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:26 crc kubenswrapper[4779]: I0929 09:54:26.531032 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:27 crc kubenswrapper[4779]: I0929 09:54:27.039986 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9qk6k"]
Sep 29 09:54:27 crc kubenswrapper[4779]: I0929 09:54:27.381961 4779 generic.go:334] "Generic (PLEG): container finished" podID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerID="7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507" exitCode=0
Sep 29 09:54:27 crc kubenswrapper[4779]: I0929 09:54:27.382003 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qk6k" event={"ID":"6798f980-2fb8-4e39-8dbc-362bb775b256","Type":"ContainerDied","Data":"7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507"}
Sep 29 09:54:27 crc kubenswrapper[4779]: I0929 09:54:27.382041 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qk6k" event={"ID":"6798f980-2fb8-4e39-8dbc-362bb775b256","Type":"ContainerStarted","Data":"c4a1aab17dcb10c10e1000260a5cf20c477365130983574b089577877570aa36"}
Sep 29 09:54:28 crc kubenswrapper[4779]: I0929 09:54:28.391481 4779 generic.go:334] "Generic (PLEG): container finished" podID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerID="a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe" exitCode=0
Sep 29 09:54:28 crc kubenswrapper[4779]: I0929 09:54:28.391680 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qk6k" event={"ID":"6798f980-2fb8-4e39-8dbc-362bb775b256","Type":"ContainerDied","Data":"a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe"}
Sep 29 09:54:29 crc kubenswrapper[4779]: I0929 09:54:29.403608 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qk6k" event={"ID":"6798f980-2fb8-4e39-8dbc-362bb775b256","Type":"ContainerStarted","Data":"d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14"}
Sep 29 09:54:29 crc kubenswrapper[4779]: I0929 09:54:29.429231 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9qk6k" podStartSLOduration=2.004670404 podStartE2EDuration="3.429211402s" podCreationTimestamp="2025-09-29 09:54:26 +0000 UTC" firstStartedPulling="2025-09-29 09:54:27.383577752 +0000 UTC m=+1499.364901656" lastFinishedPulling="2025-09-29 09:54:28.80811874 +0000 UTC m=+1500.789442654" observedRunningTime="2025-09-29 09:54:29.418530352 +0000 UTC m=+1501.399854266" watchObservedRunningTime="2025-09-29 09:54:29.429211402 +0000 UTC m=+1501.410535316"
Sep 29 09:54:36 crc kubenswrapper[4779]: I0929 09:54:36.531403 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:36 crc kubenswrapper[4779]: I0929 09:54:36.531960 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:36 crc kubenswrapper[4779]: I0929 09:54:36.582798 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:37 crc kubenswrapper[4779]: I0929 09:54:37.519595 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:37 crc kubenswrapper[4779]: I0929 09:54:37.573424 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9qk6k"]
Sep 29 09:54:39 crc kubenswrapper[4779]: I0929 09:54:39.492399 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9qk6k" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerName="registry-server" containerID="cri-o://d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14" gracePeriod=2
Sep 29 09:54:39 crc kubenswrapper[4779]: I0929 09:54:39.935141 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.092978 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2swn9\" (UniqueName: \"kubernetes.io/projected/6798f980-2fb8-4e39-8dbc-362bb775b256-kube-api-access-2swn9\") pod \"6798f980-2fb8-4e39-8dbc-362bb775b256\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") "
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.093200 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-utilities\") pod \"6798f980-2fb8-4e39-8dbc-362bb775b256\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") "
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.093309 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-catalog-content\") pod \"6798f980-2fb8-4e39-8dbc-362bb775b256\" (UID: \"6798f980-2fb8-4e39-8dbc-362bb775b256\") "
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.094272 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-utilities" (OuterVolumeSpecName: "utilities") pod "6798f980-2fb8-4e39-8dbc-362bb775b256" (UID: "6798f980-2fb8-4e39-8dbc-362bb775b256"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.109133 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6798f980-2fb8-4e39-8dbc-362bb775b256-kube-api-access-2swn9" (OuterVolumeSpecName: "kube-api-access-2swn9") pod "6798f980-2fb8-4e39-8dbc-362bb775b256" (UID: "6798f980-2fb8-4e39-8dbc-362bb775b256"). InnerVolumeSpecName "kube-api-access-2swn9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.173993 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6798f980-2fb8-4e39-8dbc-362bb775b256" (UID: "6798f980-2fb8-4e39-8dbc-362bb775b256"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.197015 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.197048 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2swn9\" (UniqueName: \"kubernetes.io/projected/6798f980-2fb8-4e39-8dbc-362bb775b256-kube-api-access-2swn9\") on node \"crc\" DevicePath \"\""
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.197059 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6798f980-2fb8-4e39-8dbc-362bb775b256-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.506866 4779 generic.go:334] "Generic (PLEG): container finished" podID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerID="d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14" exitCode=0
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.506924 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qk6k" event={"ID":"6798f980-2fb8-4e39-8dbc-362bb775b256","Type":"ContainerDied","Data":"d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14"}
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.506972 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qk6k" event={"ID":"6798f980-2fb8-4e39-8dbc-362bb775b256","Type":"ContainerDied","Data":"c4a1aab17dcb10c10e1000260a5cf20c477365130983574b089577877570aa36"}
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.507001 4779 scope.go:117] "RemoveContainer" containerID="d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.507013 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9qk6k"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.534636 4779 scope.go:117] "RemoveContainer" containerID="a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.580835 4779 scope.go:117] "RemoveContainer" containerID="7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.582560 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9qk6k"]
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.591866 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9qk6k"]
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.606688 4779 scope.go:117] "RemoveContainer" containerID="d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14"
Sep 29 09:54:40 crc kubenswrapper[4779]: E0929 09:54:40.607223 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14\": container with ID starting with d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14 not found: ID does not exist" containerID="d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.607278 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14"} err="failed to get container status \"d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14\": rpc error: code = NotFound desc = could not find container \"d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14\": container with ID starting with d22d102a60718e55b666012e1fffb09dffaffab1cfb38bd71c0fe3bd8ff1bd14 not found: ID does not exist"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.607306 4779 scope.go:117] "RemoveContainer" containerID="a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe"
Sep 29 09:54:40 crc kubenswrapper[4779]: E0929 09:54:40.607559 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe\": container with ID starting with a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe not found: ID does not exist" containerID="a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.607608 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe"} err="failed to get container status \"a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe\": rpc error: code = NotFound desc = could not find container \"a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe\": container with ID starting with a4ad8a060ad82f5208596bcfe67b63ce8928c115878dd55c3f8c014cd2eb31fe not found: ID does not exist"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.607627 4779 scope.go:117] "RemoveContainer" containerID="7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507"
Sep 29 09:54:40 crc kubenswrapper[4779]: E0929 09:54:40.608069 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507\": container with ID starting with 7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507 not found: ID does not exist" containerID="7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.608118 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507"} err="failed to get container status \"7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507\": rpc error: code = NotFound desc = could not find container \"7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507\": container with ID starting with 7dac5416f5e78977fe3351c47c1f38fff29d6eddf1b84ab177007ab3630a2507 not found: ID does not exist"
Sep 29 09:54:40 crc kubenswrapper[4779]: I0929 09:54:40.725335 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" path="/var/lib/kubelet/pods/6798f980-2fb8-4e39-8dbc-362bb775b256/volumes"
Sep 29 09:55:20 crc kubenswrapper[4779]: I0929 09:55:20.910594 4779 generic.go:334] "Generic (PLEG): container finished" podID="14b18262-5ab6-43d1-8477-04f85881e4d0" containerID="c7d76ce67495d2b1394c7e8c57f79933637ffa4cf59bd795f5711b1d4023617a" exitCode=0
Sep 29 09:55:20 crc kubenswrapper[4779]: I0929 09:55:20.910655 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" event={"ID":"14b18262-5ab6-43d1-8477-04f85881e4d0","Type":"ContainerDied","Data":"c7d76ce67495d2b1394c7e8c57f79933637ffa4cf59bd795f5711b1d4023617a"}
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.307398 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq"
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.315408 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-ssh-key\") pod \"14b18262-5ab6-43d1-8477-04f85881e4d0\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") "
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.315445 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdlvg\" (UniqueName: \"kubernetes.io/projected/14b18262-5ab6-43d1-8477-04f85881e4d0-kube-api-access-kdlvg\") pod \"14b18262-5ab6-43d1-8477-04f85881e4d0\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") "
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.315543 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-bootstrap-combined-ca-bundle\") pod \"14b18262-5ab6-43d1-8477-04f85881e4d0\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") "
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.315603 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-inventory\") pod \"14b18262-5ab6-43d1-8477-04f85881e4d0\" (UID: \"14b18262-5ab6-43d1-8477-04f85881e4d0\") "
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.322289 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "14b18262-5ab6-43d1-8477-04f85881e4d0" (UID: "14b18262-5ab6-43d1-8477-04f85881e4d0"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.338406 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14b18262-5ab6-43d1-8477-04f85881e4d0-kube-api-access-kdlvg" (OuterVolumeSpecName: "kube-api-access-kdlvg") pod "14b18262-5ab6-43d1-8477-04f85881e4d0" (UID: "14b18262-5ab6-43d1-8477-04f85881e4d0"). InnerVolumeSpecName "kube-api-access-kdlvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.356070 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-inventory" (OuterVolumeSpecName: "inventory") pod "14b18262-5ab6-43d1-8477-04f85881e4d0" (UID: "14b18262-5ab6-43d1-8477-04f85881e4d0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.360082 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "14b18262-5ab6-43d1-8477-04f85881e4d0" (UID: "14b18262-5ab6-43d1-8477-04f85881e4d0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.417980 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.418185 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdlvg\" (UniqueName: \"kubernetes.io/projected/14b18262-5ab6-43d1-8477-04f85881e4d0-kube-api-access-kdlvg\") on node \"crc\" DevicePath \"\""
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.418328 4779 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.418433 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14b18262-5ab6-43d1-8477-04f85881e4d0-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.935241 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq" event={"ID":"14b18262-5ab6-43d1-8477-04f85881e4d0","Type":"ContainerDied","Data":"c070a4a1a84ff8093ddfe996d234071357b038376ee269c340ded8fefa2df55d"}
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.935289 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c070a4a1a84ff8093ddfe996d234071357b038376ee269c340ded8fefa2df55d"
Sep 29 09:55:22 crc kubenswrapper[4779]: I0929 09:55:22.935361 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.007569 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"]
Sep 29 09:55:23 crc kubenswrapper[4779]: E0929 09:55:23.008043 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14b18262-5ab6-43d1-8477-04f85881e4d0" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.008061 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="14b18262-5ab6-43d1-8477-04f85881e4d0" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:55:23 crc kubenswrapper[4779]: E0929 09:55:23.008080 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerName="extract-utilities"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.008089 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerName="extract-utilities"
Sep 29 09:55:23 crc kubenswrapper[4779]: E0929 09:55:23.008105 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerName="extract-content"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.008112 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerName="extract-content"
Sep 29 09:55:23 crc kubenswrapper[4779]: E0929 09:55:23.008133 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerName="registry-server"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.008139 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerName="registry-server"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.008333 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="6798f980-2fb8-4e39-8dbc-362bb775b256" containerName="registry-server"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.008365 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="14b18262-5ab6-43d1-8477-04f85881e4d0" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.009241 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.014170 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.014553 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.014934 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.015390 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.017774 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"]
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.129246 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.129627 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kxq6\" (UniqueName: \"kubernetes.io/projected/cadc8662-a6be-4cd3-8042-9bb980421260-kube-api-access-6kxq6\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.129834 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.232180 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.232284 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.232343 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kxq6\" (UniqueName: \"kubernetes.io/projected/cadc8662-a6be-4cd3-8042-9bb980421260-kube-api-access-6kxq6\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.238659 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.239098 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.250821 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kxq6\" (UniqueName: \"kubernetes.io/projected/cadc8662-a6be-4cd3-8042-9bb980421260-kube-api-access-6kxq6\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.327321 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.834398 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"]
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.844443 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 09:55:23 crc kubenswrapper[4779]: I0929 09:55:23.954988 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t" event={"ID":"cadc8662-a6be-4cd3-8042-9bb980421260","Type":"ContainerStarted","Data":"5f4603022aedff3cb4194373a5e238331d0f05c7d2f506ab1736f6c19f1f36ed"}
Sep 29 09:55:24 crc kubenswrapper[4779]: I0929 09:55:24.967381 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t" event={"ID":"cadc8662-a6be-4cd3-8042-9bb980421260","Type":"ContainerStarted","Data":"fb5c444fdeaa272950679204b6ef3e50e5899685ad0710f129afe88cac946f7d"}
Sep 29 09:55:24 crc kubenswrapper[4779]: I0929 09:55:24.998101 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t" podStartSLOduration=2.567858979 podStartE2EDuration="2.998083109s" podCreationTimestamp="2025-09-29 09:55:22 +0000 UTC" firstStartedPulling="2025-09-29 09:55:23.84419287 +0000 UTC m=+1555.825516774" lastFinishedPulling="2025-09-29 09:55:24.274417 +0000 UTC m=+1556.255740904" observedRunningTime="2025-09-29 09:55:24.994583648 +0000 UTC m=+1556.975907572" watchObservedRunningTime="2025-09-29 09:55:24.998083109 +0000 UTC m=+1556.979407013"
Sep 29 09:55:37 crc kubenswrapper[4779]: I0929 09:55:37.886885 4779 scope.go:117] "RemoveContainer" containerID="02e33785d22b991ef6c158db19b8e40437b7de2fa1dfea4c5084e90fd547ca99"
Sep 29 09:55:37 crc kubenswrapper[4779]: I0929 09:55:37.914787 4779 scope.go:117] "RemoveContainer" containerID="fddeffb5978318ff4a7ea04690b0e1223843c5207c79250f72bf88f3f6593ef7"
Sep 29 09:55:53 crc kubenswrapper[4779]: I0929 09:55:53.045286 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-sd786"]
Sep 29 09:55:53 crc kubenswrapper[4779]: I0929 09:55:53.054582 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-sd786"]
Sep 29 09:55:54 crc kubenswrapper[4779]: I0929 09:55:54.034873 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-c9jzr"]
Sep 29 09:55:54 crc kubenswrapper[4779]: I0929 09:55:54.046852 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-c9jzr"]
Sep 29 09:55:54 crc kubenswrapper[4779]: I0929 09:55:54.729189 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb3e5b11-382a-4ff0-af29-11a3573ff188" path="/var/lib/kubelet/pods/bb3e5b11-382a-4ff0-af29-11a3573ff188/volumes"
Sep 29 09:55:54 crc kubenswrapper[4779]: I0929 09:55:54.730699 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff694e86-4a85-4102-9aff-8c91a7bb9106" path="/var/lib/kubelet/pods/ff694e86-4a85-4102-9aff-8c91a7bb9106/volumes"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.026857 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dgjbv"]
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.028959 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.040505 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgjbv"]
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.187056 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-utilities\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.187140 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-catalog-content\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.187332 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cscqv\" (UniqueName: \"kubernetes.io/projected/2195e5e0-40dc-4715-ace3-a42325cd8788-kube-api-access-cscqv\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.288854 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-catalog-content\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.289246 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cscqv\" (UniqueName: \"kubernetes.io/projected/2195e5e0-40dc-4715-ace3-a42325cd8788-kube-api-access-cscqv\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.289395 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-catalog-content\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.289577 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-utilities\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.289845 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-utilities\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.308509 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cscqv\" (UniqueName: \"kubernetes.io/projected/2195e5e0-40dc-4715-ace3-a42325cd8788-kube-api-access-cscqv\") pod \"redhat-marketplace-dgjbv\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.403336 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dgjbv"
Sep 29 09:55:55 crc kubenswrapper[4779]: I0929 09:55:55.846142 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgjbv"]
Sep 29 09:55:56 crc kubenswrapper[4779]: I0929 09:55:56.293692 4779 generic.go:334] "Generic (PLEG): container finished" podID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerID="b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a" exitCode=0
Sep 29 09:55:56 crc kubenswrapper[4779]: I0929 09:55:56.293764 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgjbv" event={"ID":"2195e5e0-40dc-4715-ace3-a42325cd8788","Type":"ContainerDied","Data":"b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a"}
Sep 29 09:55:56 crc kubenswrapper[4779]: I0929 09:55:56.294047 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgjbv" event={"ID":"2195e5e0-40dc-4715-ace3-a42325cd8788","Type":"ContainerStarted","Data":"7c878f56f40099e70be0d5a7e67f9586621252e2cb8b16ad72e7fffcd0ac9414"}
Sep 29 09:55:57 crc kubenswrapper[4779]: I0929 09:55:57.303325 4779 generic.go:334] "Generic (PLEG): container finished" podID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerID="1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766" exitCode=0
Sep 29 09:55:57 crc kubenswrapper[4779]: I0929 09:55:57.303497 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgjbv" event={"ID":"2195e5e0-40dc-4715-ace3-a42325cd8788","Type":"ContainerDied","Data":"1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766"}
Sep 29 09:55:58 crc kubenswrapper[4779]: I0929 09:55:58.313569 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgjbv" event={"ID":"2195e5e0-40dc-4715-ace3-a42325cd8788","Type":"ContainerStarted","Data":"7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25"}
Sep 29 09:55:58 crc kubenswrapper[4779]: I0929 09:55:58.331669 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dgjbv" podStartSLOduration=1.838830004 podStartE2EDuration="3.331658025s" podCreationTimestamp="2025-09-29 09:55:55 +0000 UTC" firstStartedPulling="2025-09-29 09:55:56.296517929 +0000 UTC m=+1588.277841843" lastFinishedPulling="2025-09-29 09:55:57.78934594 +0000 UTC m=+1589.770669864" observedRunningTime="2025-09-29 09:55:58.330996226 +0000 UTC m=+1590.312320130" watchObservedRunningTime="2025-09-29 09:55:58.331658025 +0000 UTC m=+1590.312981929"
Sep 29 09:56:02 crc kubenswrapper[4779]: I0929 09:56:02.033244 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-sr2s6"]
Sep 29 09:56:02 crc kubenswrapper[4779]: I0929 09:56:02.048041 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-q8jgk"]
Sep 29 09:56:02 crc kubenswrapper[4779]: I0929 09:56:02.057408 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-sr2s6"]
Sep 29 09:56:02 crc kubenswrapper[4779]: I0929 09:56:02.067812 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-q8jgk"]
Sep 29 09:56:02 crc kubenswrapper[4779]: I0929 09:56:02.732154 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="390b7729-1b5b-4581-98c4-decf700653e1"
path="/var/lib/kubelet/pods/390b7729-1b5b-4581-98c4-decf700653e1/volumes" Sep 29 09:56:02 crc kubenswrapper[4779]: I0929 09:56:02.733667 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7180a99e-b8b5-4c57-9bbe-221f6b83bb16" path="/var/lib/kubelet/pods/7180a99e-b8b5-4c57-9bbe-221f6b83bb16/volumes" Sep 29 09:56:03 crc kubenswrapper[4779]: I0929 09:56:03.042195 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-7fff-account-create-zgx74"] Sep 29 09:56:03 crc kubenswrapper[4779]: I0929 09:56:03.051710 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-7fff-account-create-zgx74"] Sep 29 09:56:04 crc kubenswrapper[4779]: I0929 09:56:04.046230 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-0ed1-account-create-lmlvp"] Sep 29 09:56:04 crc kubenswrapper[4779]: I0929 09:56:04.054857 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-0ed1-account-create-lmlvp"] Sep 29 09:56:04 crc kubenswrapper[4779]: I0929 09:56:04.742864 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308b21fc-c8dc-4f88-a842-a5517b2bb2e9" path="/var/lib/kubelet/pods/308b21fc-c8dc-4f88-a842-a5517b2bb2e9/volumes" Sep 29 09:56:04 crc kubenswrapper[4779]: I0929 09:56:04.743684 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4c0f69c-d549-4d1d-b893-01aac180cf13" path="/var/lib/kubelet/pods/b4c0f69c-d549-4d1d-b893-01aac180cf13/volumes" Sep 29 09:56:05 crc kubenswrapper[4779]: I0929 09:56:05.404181 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dgjbv" Sep 29 09:56:05 crc kubenswrapper[4779]: I0929 09:56:05.405395 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dgjbv" Sep 29 09:56:05 crc kubenswrapper[4779]: I0929 09:56:05.464368 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dgjbv" Sep 29 09:56:06 crc kubenswrapper[4779]: I0929 09:56:06.458351 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dgjbv" Sep 29 09:56:06 crc kubenswrapper[4779]: I0929 09:56:06.527934 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgjbv"] Sep 29 09:56:08 crc kubenswrapper[4779]: I0929 09:56:08.418951 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dgjbv" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerName="registry-server" containerID="cri-o://7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25" gracePeriod=2 Sep 29 09:56:08 crc kubenswrapper[4779]: I0929 09:56:08.896427 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dgjbv" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.060991 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-catalog-content\") pod \"2195e5e0-40dc-4715-ace3-a42325cd8788\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.061072 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-utilities\") pod \"2195e5e0-40dc-4715-ace3-a42325cd8788\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.061305 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cscqv\" (UniqueName: \"kubernetes.io/projected/2195e5e0-40dc-4715-ace3-a42325cd8788-kube-api-access-cscqv\") pod \"2195e5e0-40dc-4715-ace3-a42325cd8788\" (UID: \"2195e5e0-40dc-4715-ace3-a42325cd8788\") " Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.062471 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-utilities" (OuterVolumeSpecName: "utilities") pod "2195e5e0-40dc-4715-ace3-a42325cd8788" (UID: "2195e5e0-40dc-4715-ace3-a42325cd8788"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.066700 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2195e5e0-40dc-4715-ace3-a42325cd8788-kube-api-access-cscqv" (OuterVolumeSpecName: "kube-api-access-cscqv") pod "2195e5e0-40dc-4715-ace3-a42325cd8788" (UID: "2195e5e0-40dc-4715-ace3-a42325cd8788"). InnerVolumeSpecName "kube-api-access-cscqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.073424 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2195e5e0-40dc-4715-ace3-a42325cd8788" (UID: "2195e5e0-40dc-4715-ace3-a42325cd8788"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.164103 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.164149 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195e5e0-40dc-4715-ace3-a42325cd8788-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.164163 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cscqv\" (UniqueName: \"kubernetes.io/projected/2195e5e0-40dc-4715-ace3-a42325cd8788-kube-api-access-cscqv\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.435174 4779 generic.go:334] "Generic (PLEG): container finished" podID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerID="7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25" exitCode=0 Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.435239 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgjbv" event={"ID":"2195e5e0-40dc-4715-ace3-a42325cd8788","Type":"ContainerDied","Data":"7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25"} Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.435295 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dgjbv" event={"ID":"2195e5e0-40dc-4715-ace3-a42325cd8788","Type":"ContainerDied","Data":"7c878f56f40099e70be0d5a7e67f9586621252e2cb8b16ad72e7fffcd0ac9414"} Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.435318 4779 scope.go:117] "RemoveContainer" containerID="7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.435296 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dgjbv" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.456807 4779 scope.go:117] "RemoveContainer" containerID="1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.484636 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgjbv"] Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.495267 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dgjbv"] Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.497261 4779 scope.go:117] "RemoveContainer" containerID="b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.526593 4779 scope.go:117] "RemoveContainer" containerID="7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25" Sep 29 09:56:09 crc kubenswrapper[4779]: E0929 09:56:09.527179 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25\": container with ID starting with 7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25 not found: ID does not exist" containerID="7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.527219 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25"} err="failed to get container status \"7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25\": rpc error: code = NotFound desc = could not find container \"7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25\": container with ID starting with 7df58b17fe49b4d3350bac4253ed8f7023e6a954fa78a6718f28a4b685cf7c25 not found: ID does not exist" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.527248 4779 scope.go:117] "RemoveContainer" containerID="1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766" Sep 29 09:56:09 crc kubenswrapper[4779]: E0929 09:56:09.527617 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766\": container with ID starting with 1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766 not found: ID does not exist" containerID="1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.527658 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766"} err="failed to get container status \"1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766\": rpc error: code = NotFound desc = could not find container \"1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766\": container with ID starting with 1e6c11b1086605c3e6e0b679397b2eee7eecdb743d2bbc3a06df1ebed5ed8766 not found: ID does not exist" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.527686 4779 scope.go:117] "RemoveContainer" containerID="b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a" Sep 29 09:56:09 crc kubenswrapper[4779]: E0929 09:56:09.528124 4779 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a\": container with ID starting with b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a not found: ID does not exist" containerID="b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a" Sep 29 09:56:09 crc kubenswrapper[4779]: I0929 09:56:09.528162 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a"} err="failed to get container status \"b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a\": rpc error: code = NotFound desc = could not find container \"b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a\": container with ID starting with b92381e228381cdbedc068b4c8a75ce3c653506709456a0350874555e07b915a not found: ID does not exist" Sep 29 09:56:10 crc kubenswrapper[4779]: I0929 09:56:10.733652 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" path="/var/lib/kubelet/pods/2195e5e0-40dc-4715-ace3-a42325cd8788/volumes" Sep 29 09:56:18 crc kubenswrapper[4779]: I0929 09:56:18.039578 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5200-account-create-lfnrs"] Sep 29 09:56:18 crc kubenswrapper[4779]: I0929 09:56:18.052093 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9c17-account-create-77v8l"] Sep 29 09:56:18 crc kubenswrapper[4779]: I0929 09:56:18.059577 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9c17-account-create-77v8l"] Sep 29 09:56:18 crc kubenswrapper[4779]: I0929 09:56:18.066979 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-5200-account-create-lfnrs"] Sep 29 09:56:18 crc kubenswrapper[4779]: I0929 09:56:18.724457 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d815e2e-6933-4792-97ef-b22c5d1df8d0" path="/var/lib/kubelet/pods/2d815e2e-6933-4792-97ef-b22c5d1df8d0/volumes" Sep 29 09:56:18 crc kubenswrapper[4779]: I0929 09:56:18.725037 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fdebe28-420d-4e9c-bc14-cd59fcb284bd" path="/var/lib/kubelet/pods/8fdebe28-420d-4e9c-bc14-cd59fcb284bd/volumes" Sep 29 09:56:33 crc kubenswrapper[4779]: I0929 09:56:33.042667 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-c9wj9"] Sep 29 09:56:33 crc kubenswrapper[4779]: I0929 09:56:33.059207 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-c9wj9"] Sep 29 09:56:33 crc kubenswrapper[4779]: I0929 09:56:33.072014 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-g7g8x"] Sep 29 09:56:33 crc kubenswrapper[4779]: I0929 09:56:33.080857 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-g7g8x"] Sep 29 09:56:34 crc kubenswrapper[4779]: I0929 09:56:34.725062 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c3d7dc4-b829-4340-abb5-449b3dcd606f" path="/var/lib/kubelet/pods/1c3d7dc4-b829-4340-abb5-449b3dcd606f/volumes" Sep 29 09:56:34 crc kubenswrapper[4779]: I0929 09:56:34.725855 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9775652b-71da-489b-9105-d795a7feb2cb" path="/var/lib/kubelet/pods/9775652b-71da-489b-9105-d795a7feb2cb/volumes" Sep 29 09:56:36 crc kubenswrapper[4779]: 
I0929 09:56:36.031496 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-g94ck"] Sep 29 09:56:36 crc kubenswrapper[4779]: I0929 09:56:36.041009 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-g94ck"] Sep 29 09:56:36 crc kubenswrapper[4779]: I0929 09:56:36.051287 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-dnnxk"] Sep 29 09:56:36 crc kubenswrapper[4779]: I0929 09:56:36.058364 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-dnnxk"] Sep 29 09:56:36 crc kubenswrapper[4779]: I0929 09:56:36.738237 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bfe06c4-af74-448c-b10b-d2f7f62fc96a" path="/var/lib/kubelet/pods/2bfe06c4-af74-448c-b10b-d2f7f62fc96a/volumes" Sep 29 09:56:36 crc kubenswrapper[4779]: I0929 09:56:36.741329 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b16b7fa7-e1c5-4368-9f6b-cc5a7526671f" path="/var/lib/kubelet/pods/b16b7fa7-e1c5-4368-9f6b-cc5a7526671f/volumes" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.003423 4779 scope.go:117] "RemoveContainer" containerID="651783ca245d77f0afa2e410b9981583f485aea154b5dcfde8e0dab42f54b11d" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.026174 4779 scope.go:117] "RemoveContainer" containerID="85715d7ff260c61df401585a2352a5f5a64c7be90f3ce8200620d79fabccbd58" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.076735 4779 scope.go:117] "RemoveContainer" containerID="8453735f3ea9dbaa70d50cbcb969524425033812b9026e34fb1b6283314c9d17" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.127731 4779 scope.go:117] "RemoveContainer" containerID="0d9fa0303943299319ae8db9134c941057c1848ba3bef4d7fe0d2bc9caf937db" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.167614 4779 scope.go:117] "RemoveContainer" containerID="21943e6406322efcbed715e36a162f36d904fbcb8596d313a68820341259471f" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.206104 4779 scope.go:117] "RemoveContainer" containerID="927a1a0110dd181766e0448083391941f565579331f56c0859febfdf38e68f4b" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.281840 4779 scope.go:117] "RemoveContainer" containerID="6908d8bacc0ddb133fedc2bdef6d4b7d272243d33be84c58815b6cbd7259b19c" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.312091 4779 scope.go:117] "RemoveContainer" containerID="2127a5e642bba05c2507422aefe585dfee8c5f8076964ddf6772533a520e6ede" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.333124 4779 scope.go:117] "RemoveContainer" containerID="9d53d74c76ef90d1be78b9d8eb35bfa96765c67318df2e7b3e506299d03c3cf6" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.353162 4779 scope.go:117] "RemoveContainer" containerID="86ba1475aa4cafc80d5808d802fbc60525a1629426a21eed31f1e7f3cb274630" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.369991 4779 scope.go:117] "RemoveContainer" containerID="1ca1dc776d47cb3b2b721ac48874f0d2dda8e5b90f422ca6950cd1bec6581c97" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.386599 4779 scope.go:117] "RemoveContainer" containerID="64093fcd75ba4993fc70e84f1f222e9b7df183fe5712749c18488cb3786cd5ea" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.407322 4779 scope.go:117] "RemoveContainer" containerID="a781d91bc32d0761cc88ff22e5bdaf4fefc948be30b0f4813a9c5bacfeba6e51" Sep 29 09:56:38 crc kubenswrapper[4779]: I0929 09:56:38.426436 4779 scope.go:117] "RemoveContainer" 
containerID="71d9135b9439a259f8866b01cf297bd49759f4325aa6148e628979f2a5599da5" Sep 29 09:56:39 crc kubenswrapper[4779]: I0929 09:56:39.037940 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-421c-account-create-hf6f2"] Sep 29 09:56:39 crc kubenswrapper[4779]: I0929 09:56:39.046798 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-421c-account-create-hf6f2"] Sep 29 09:56:39 crc kubenswrapper[4779]: I0929 09:56:39.725876 4779 generic.go:334] "Generic (PLEG): container finished" podID="cadc8662-a6be-4cd3-8042-9bb980421260" containerID="fb5c444fdeaa272950679204b6ef3e50e5899685ad0710f129afe88cac946f7d" exitCode=0 Sep 29 09:56:39 crc kubenswrapper[4779]: I0929 09:56:39.725974 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t" event={"ID":"cadc8662-a6be-4cd3-8042-9bb980421260","Type":"ContainerDied","Data":"fb5c444fdeaa272950679204b6ef3e50e5899685ad0710f129afe88cac946f7d"} Sep 29 09:56:40 crc kubenswrapper[4779]: I0929 09:56:40.729457 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8687a876-1897-472b-9a43-dc6ab0fb034a" path="/var/lib/kubelet/pods/8687a876-1897-472b-9a43-dc6ab0fb034a/volumes" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.124632 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.273881 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-inventory\") pod \"cadc8662-a6be-4cd3-8042-9bb980421260\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.273989 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-ssh-key\") pod \"cadc8662-a6be-4cd3-8042-9bb980421260\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.274183 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kxq6\" (UniqueName: \"kubernetes.io/projected/cadc8662-a6be-4cd3-8042-9bb980421260-kube-api-access-6kxq6\") pod \"cadc8662-a6be-4cd3-8042-9bb980421260\" (UID: \"cadc8662-a6be-4cd3-8042-9bb980421260\") " Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.279537 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cadc8662-a6be-4cd3-8042-9bb980421260-kube-api-access-6kxq6" (OuterVolumeSpecName: "kube-api-access-6kxq6") pod "cadc8662-a6be-4cd3-8042-9bb980421260" (UID: "cadc8662-a6be-4cd3-8042-9bb980421260"). InnerVolumeSpecName "kube-api-access-6kxq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.305771 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cadc8662-a6be-4cd3-8042-9bb980421260" (UID: "cadc8662-a6be-4cd3-8042-9bb980421260"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.306804 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-inventory" (OuterVolumeSpecName: "inventory") pod "cadc8662-a6be-4cd3-8042-9bb980421260" (UID: "cadc8662-a6be-4cd3-8042-9bb980421260"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.376079 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kxq6\" (UniqueName: \"kubernetes.io/projected/cadc8662-a6be-4cd3-8042-9bb980421260-kube-api-access-6kxq6\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.376115 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.376127 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cadc8662-a6be-4cd3-8042-9bb980421260-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.752552 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t" event={"ID":"cadc8662-a6be-4cd3-8042-9bb980421260","Type":"ContainerDied","Data":"5f4603022aedff3cb4194373a5e238331d0f05c7d2f506ab1736f6c19f1f36ed"} Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.752591 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.752613 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f4603022aedff3cb4194373a5e238331d0f05c7d2f506ab1736f6c19f1f36ed" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.832407 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7"] Sep 29 09:56:41 crc kubenswrapper[4779]: E0929 09:56:41.832761 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerName="registry-server" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.832777 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerName="registry-server" Sep 29 09:56:41 crc kubenswrapper[4779]: E0929 09:56:41.832792 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerName="extract-content" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.832798 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerName="extract-content" Sep 29 09:56:41 crc kubenswrapper[4779]: E0929 09:56:41.832809 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerName="extract-utilities" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.832815 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerName="extract-utilities" Sep 29 09:56:41 crc kubenswrapper[4779]: E0929 09:56:41.832824 4779 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="cadc8662-a6be-4cd3-8042-9bb980421260" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.832831 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cadc8662-a6be-4cd3-8042-9bb980421260" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.833035 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2195e5e0-40dc-4715-ace3-a42325cd8788" containerName="registry-server" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.833064 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cadc8662-a6be-4cd3-8042-9bb980421260" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.833649 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.835997 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.836320 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.839609 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.839823 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.849315 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7"] Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.985612 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.985783 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdwrw\" (UniqueName: \"kubernetes.io/projected/7f75becd-23f3-46d5-ae30-73c4518c571e-kube-api-access-xdwrw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:41 crc kubenswrapper[4779]: I0929 09:56:41.985822 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.028327 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-lg2fm"] Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.037471 4779 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-lg2fm"] Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.087218 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.087347 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.087440 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdwrw\" (UniqueName: \"kubernetes.io/projected/7f75becd-23f3-46d5-ae30-73c4518c571e-kube-api-access-xdwrw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.092569 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.093387 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.106664 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdwrw\" (UniqueName: \"kubernetes.io/projected/7f75becd-23f3-46d5-ae30-73c4518c571e-kube-api-access-xdwrw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.199302 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.709102 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7"] Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.724827 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbef3a83-4b34-466e-895f-1005a824efc0" path="/var/lib/kubelet/pods/fbef3a83-4b34-466e-895f-1005a824efc0/volumes" Sep 29 09:56:42 crc kubenswrapper[4779]: I0929 09:56:42.761887 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" event={"ID":"7f75becd-23f3-46d5-ae30-73c4518c571e","Type":"ContainerStarted","Data":"5ae25b85307a1c87e88cad0c038b2e9b206adc747a0ff0978982b792957f0941"} Sep 29 09:56:43 crc kubenswrapper[4779]: I0929 09:56:43.041073 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4jkl4"] Sep 29 09:56:43 crc kubenswrapper[4779]: I0929 09:56:43.046202 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4jkl4"] Sep 29 09:56:43 crc kubenswrapper[4779]: I0929 09:56:43.772404 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" event={"ID":"7f75becd-23f3-46d5-ae30-73c4518c571e","Type":"ContainerStarted","Data":"e935d94c1390bd70e16904fec651fa490f2190987462f0903463e25abfd50756"} Sep 29 09:56:44 crc kubenswrapper[4779]: I0929 09:56:44.724863 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eead9b6-ef70-4326-be2e-07fc0e22c444" path="/var/lib/kubelet/pods/6eead9b6-ef70-4326-be2e-07fc0e22c444/volumes" Sep 29 09:56:46 crc kubenswrapper[4779]: I0929 09:56:46.966676 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 09:56:46 crc kubenswrapper[4779]: I0929 09:56:46.967049 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 09:56:48 crc kubenswrapper[4779]: I0929 09:56:48.817713 4779 generic.go:334] "Generic (PLEG): container finished" podID="7f75becd-23f3-46d5-ae30-73c4518c571e" containerID="e935d94c1390bd70e16904fec651fa490f2190987462f0903463e25abfd50756" exitCode=0 Sep 29 09:56:48 crc kubenswrapper[4779]: I0929 09:56:48.817831 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" event={"ID":"7f75becd-23f3-46d5-ae30-73c4518c571e","Type":"ContainerDied","Data":"e935d94c1390bd70e16904fec651fa490f2190987462f0903463e25abfd50756"} Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.224510 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.367630 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-inventory\") pod \"7f75becd-23f3-46d5-ae30-73c4518c571e\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.367675 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-ssh-key\") pod \"7f75becd-23f3-46d5-ae30-73c4518c571e\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.367774 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdwrw\" (UniqueName: \"kubernetes.io/projected/7f75becd-23f3-46d5-ae30-73c4518c571e-kube-api-access-xdwrw\") pod \"7f75becd-23f3-46d5-ae30-73c4518c571e\" (UID: \"7f75becd-23f3-46d5-ae30-73c4518c571e\") " Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.374928 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f75becd-23f3-46d5-ae30-73c4518c571e-kube-api-access-xdwrw" (OuterVolumeSpecName: "kube-api-access-xdwrw") pod "7f75becd-23f3-46d5-ae30-73c4518c571e" (UID: "7f75becd-23f3-46d5-ae30-73c4518c571e"). InnerVolumeSpecName "kube-api-access-xdwrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.396664 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7f75becd-23f3-46d5-ae30-73c4518c571e" (UID: "7f75becd-23f3-46d5-ae30-73c4518c571e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.396721 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-inventory" (OuterVolumeSpecName: "inventory") pod "7f75becd-23f3-46d5-ae30-73c4518c571e" (UID: "7f75becd-23f3-46d5-ae30-73c4518c571e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.470201 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.470236 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7f75becd-23f3-46d5-ae30-73c4518c571e-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.470246 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdwrw\" (UniqueName: \"kubernetes.io/projected/7f75becd-23f3-46d5-ae30-73c4518c571e-kube-api-access-xdwrw\") on node \"crc\" DevicePath \"\"" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.835150 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" event={"ID":"7f75becd-23f3-46d5-ae30-73c4518c571e","Type":"ContainerDied","Data":"5ae25b85307a1c87e88cad0c038b2e9b206adc747a0ff0978982b792957f0941"} Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.835471 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ae25b85307a1c87e88cad0c038b2e9b206adc747a0ff0978982b792957f0941" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.835534 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.932975 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854"] Sep 29 09:56:50 crc kubenswrapper[4779]: E0929 09:56:50.933382 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f75becd-23f3-46d5-ae30-73c4518c571e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.933402 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f75becd-23f3-46d5-ae30-73c4518c571e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.933582 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f75becd-23f3-46d5-ae30-73c4518c571e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.934211 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.937270 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.937412 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.937733 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.941967 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 09:56:50 crc kubenswrapper[4779]: I0929 09:56:50.960680 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854"] Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.042439 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-e005-account-create-fcctv"] Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.051242 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-df0d-account-create-zpd2h"] Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.065597 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-e005-account-create-fcctv"] Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.073370 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-df0d-account-create-zpd2h"] Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.081719 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.081928 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.082087 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrx9t\" (UniqueName: \"kubernetes.io/projected/ccb51e5d-bdcb-4381-b665-87023be5aa51-kube-api-access-zrx9t\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.184057 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.184150 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.184204 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrx9t\" (UniqueName: \"kubernetes.io/projected/ccb51e5d-bdcb-4381-b665-87023be5aa51-kube-api-access-zrx9t\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.192860 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.192914 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.202431 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrx9t\" (UniqueName: \"kubernetes.io/projected/ccb51e5d-bdcb-4381-b665-87023be5aa51-kube-api-access-zrx9t\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qt854\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.254464 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854"
Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.799828 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854"]
Sep 29 09:56:51 crc kubenswrapper[4779]: I0929 09:56:51.844729 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" event={"ID":"ccb51e5d-bdcb-4381-b665-87023be5aa51","Type":"ContainerStarted","Data":"6d39c0e304c1a8a037228d37dbc1b002892b9850778243688c359bf8a3863378"}
Sep 29 09:56:52 crc kubenswrapper[4779]: I0929 09:56:52.726304 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f74fcce-3141-4e78-a238-d10f3ff03a11" path="/var/lib/kubelet/pods/2f74fcce-3141-4e78-a238-d10f3ff03a11/volumes"
Sep 29 09:56:52 crc kubenswrapper[4779]: I0929 09:56:52.728051 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e274f651-2029-48e9-9142-bb141ebdf551" path="/var/lib/kubelet/pods/e274f651-2029-48e9-9142-bb141ebdf551/volumes"
Sep 29 09:56:52 crc kubenswrapper[4779]: I0929 09:56:52.854552 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" event={"ID":"ccb51e5d-bdcb-4381-b665-87023be5aa51","Type":"ContainerStarted","Data":"50e856a08f214e1a45d5aaaca645891abb1ce8917036edbe1548e1f41cdc7eb8"}
Sep 29 09:56:52 crc kubenswrapper[4779]: I0929 09:56:52.879012 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" podStartSLOduration=2.313635066 podStartE2EDuration="2.878996438s" podCreationTimestamp="2025-09-29 09:56:50 +0000 UTC" firstStartedPulling="2025-09-29 09:56:51.807673591 +0000 UTC m=+1643.788997485" lastFinishedPulling="2025-09-29 09:56:52.373034953 +0000 UTC m=+1644.354358857" observedRunningTime="2025-09-29 09:56:52.87115948 +0000 UTC m=+1644.852483384" watchObservedRunningTime="2025-09-29 09:56:52.878996438 +0000 UTC m=+1644.860320332"
Sep 29 09:57:05 crc kubenswrapper[4779]: I0929 09:57:05.037826 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-fh8wz"]
Sep 29 09:57:05 crc kubenswrapper[4779]: I0929 09:57:05.053519 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-fh8wz"]
Sep 29 09:57:06 crc kubenswrapper[4779]: I0929 09:57:06.753455 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d52d2b76-d868-4f6d-ab27-20d1d8223952" path="/var/lib/kubelet/pods/d52d2b76-d868-4f6d-ab27-20d1d8223952/volumes"
Sep 29 09:57:13 crc kubenswrapper[4779]: I0929 09:57:13.030199 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-kscbf"]
Sep 29 09:57:13 crc kubenswrapper[4779]: I0929 09:57:13.041241 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-kscbf"]
Sep 29 09:57:14 crc kubenswrapper[4779]: I0929 09:57:14.730504 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b1b89f6-8491-4d78-bfb2-7959e2f6765d" path="/var/lib/kubelet/pods/1b1b89f6-8491-4d78-bfb2-7959e2f6765d/volumes"
Sep 29 09:57:16 crc kubenswrapper[4779]: I0929 09:57:16.055544 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-7rjgf"]
Sep 29 09:57:16 crc kubenswrapper[4779]: I0929 09:57:16.063015 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-7rjgf"]
Sep 29 09:57:16 crc kubenswrapper[4779]: I0929 09:57:16.728884 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ce07226-a77d-46d9-a099-04375136d8fc" path="/var/lib/kubelet/pods/7ce07226-a77d-46d9-a099-04375136d8fc/volumes"
Sep 29 09:57:16 crc kubenswrapper[4779]: I0929 09:57:16.966106 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:57:16 crc kubenswrapper[4779]: I0929 09:57:16.966171 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:57:24 crc kubenswrapper[4779]: I0929 09:57:24.045820 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-9l6pm"]
Sep 29 09:57:24 crc kubenswrapper[4779]: I0929 09:57:24.057127 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-9l6pm"]
Sep 29 09:57:24 crc kubenswrapper[4779]: I0929 09:57:24.727557 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5102d17-8f46-47e2-8a21-d8afec790069" path="/var/lib/kubelet/pods/e5102d17-8f46-47e2-8a21-d8afec790069/volumes"
Sep 29 09:57:30 crc kubenswrapper[4779]: E0929 09:57:30.215713 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podccb51e5d_bdcb_4381_b665_87023be5aa51.slice/crio-conmon-50e856a08f214e1a45d5aaaca645891abb1ce8917036edbe1548e1f41cdc7eb8.scope\": RecentStats: unable to find data in memory cache]"
Sep 29 09:57:30 crc kubenswrapper[4779]: I0929 09:57:30.296923 4779 generic.go:334] "Generic (PLEG): container finished" podID="ccb51e5d-bdcb-4381-b665-87023be5aa51" containerID="50e856a08f214e1a45d5aaaca645891abb1ce8917036edbe1548e1f41cdc7eb8" exitCode=0
Sep 29 09:57:30 crc kubenswrapper[4779]: I0929 09:57:30.297058 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" event={"ID":"ccb51e5d-bdcb-4381-b665-87023be5aa51","Type":"ContainerDied","Data":"50e856a08f214e1a45d5aaaca645891abb1ce8917036edbe1548e1f41cdc7eb8"}
Sep 29 09:57:31 crc kubenswrapper[4779]: I0929 09:57:31.724459 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854"
Sep 29 09:57:31 crc kubenswrapper[4779]: I0929 09:57:31.901642 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-inventory\") pod \"ccb51e5d-bdcb-4381-b665-87023be5aa51\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") "
Sep 29 09:57:31 crc kubenswrapper[4779]: I0929 09:57:31.902638 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-ssh-key\") pod \"ccb51e5d-bdcb-4381-b665-87023be5aa51\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") "
Sep 29 09:57:31 crc kubenswrapper[4779]: I0929 09:57:31.902762 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrx9t\" (UniqueName: \"kubernetes.io/projected/ccb51e5d-bdcb-4381-b665-87023be5aa51-kube-api-access-zrx9t\") pod \"ccb51e5d-bdcb-4381-b665-87023be5aa51\" (UID: \"ccb51e5d-bdcb-4381-b665-87023be5aa51\") "
Sep 29 09:57:31 crc kubenswrapper[4779]: I0929 09:57:31.907375 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccb51e5d-bdcb-4381-b665-87023be5aa51-kube-api-access-zrx9t" (OuterVolumeSpecName: "kube-api-access-zrx9t") pod "ccb51e5d-bdcb-4381-b665-87023be5aa51" (UID: "ccb51e5d-bdcb-4381-b665-87023be5aa51"). InnerVolumeSpecName "kube-api-access-zrx9t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:57:31 crc kubenswrapper[4779]: I0929 09:57:31.938783 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-inventory" (OuterVolumeSpecName: "inventory") pod "ccb51e5d-bdcb-4381-b665-87023be5aa51" (UID: "ccb51e5d-bdcb-4381-b665-87023be5aa51"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:57:31 crc kubenswrapper[4779]: I0929 09:57:31.939135 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ccb51e5d-bdcb-4381-b665-87023be5aa51" (UID: "ccb51e5d-bdcb-4381-b665-87023be5aa51"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.006073 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.006111 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrx9t\" (UniqueName: \"kubernetes.io/projected/ccb51e5d-bdcb-4381-b665-87023be5aa51-kube-api-access-zrx9t\") on node \"crc\" DevicePath \"\""
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.006127 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ccb51e5d-bdcb-4381-b665-87023be5aa51-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.316317 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854" event={"ID":"ccb51e5d-bdcb-4381-b665-87023be5aa51","Type":"ContainerDied","Data":"6d39c0e304c1a8a037228d37dbc1b002892b9850778243688c359bf8a3863378"}
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.316360 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d39c0e304c1a8a037228d37dbc1b002892b9850778243688c359bf8a3863378"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.316377 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.399510 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"]
Sep 29 09:57:32 crc kubenswrapper[4779]: E0929 09:57:32.399886 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb51e5d-bdcb-4381-b665-87023be5aa51" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.399918 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb51e5d-bdcb-4381-b665-87023be5aa51" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.400129 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccb51e5d-bdcb-4381-b665-87023be5aa51" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.400754 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.403784 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.404182 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.404203 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.404275 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.416183 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"]
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.514248 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.514657 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbfvt\" (UniqueName: \"kubernetes.io/projected/172c15cd-62e0-4fe1-9132-cb7df2f53783-kube-api-access-rbfvt\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.514756 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.616359 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbfvt\" (UniqueName: \"kubernetes.io/projected/172c15cd-62e0-4fe1-9132-cb7df2f53783-kube-api-access-rbfvt\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.616459 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.616522 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.620726 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.620780 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.633762 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbfvt\" (UniqueName: \"kubernetes.io/projected/172c15cd-62e0-4fe1-9132-cb7df2f53783-kube-api-access-rbfvt\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:32 crc kubenswrapper[4779]: I0929 09:57:32.724435 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:33 crc kubenswrapper[4779]: I0929 09:57:33.286051 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"]
Sep 29 09:57:33 crc kubenswrapper[4779]: W0929 09:57:33.293534 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod172c15cd_62e0_4fe1_9132_cb7df2f53783.slice/crio-a1acb8bd7e378ba84681bc686b2a933846c899f324b8e34753ba422fc2ab5e69 WatchSource:0}: Error finding container a1acb8bd7e378ba84681bc686b2a933846c899f324b8e34753ba422fc2ab5e69: Status 404 returned error can't find the container with id a1acb8bd7e378ba84681bc686b2a933846c899f324b8e34753ba422fc2ab5e69
Sep 29 09:57:33 crc kubenswrapper[4779]: I0929 09:57:33.328456 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz" event={"ID":"172c15cd-62e0-4fe1-9132-cb7df2f53783","Type":"ContainerStarted","Data":"a1acb8bd7e378ba84681bc686b2a933846c899f324b8e34753ba422fc2ab5e69"}
Sep 29 09:57:34 crc kubenswrapper[4779]: I0929 09:57:34.341086 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz" event={"ID":"172c15cd-62e0-4fe1-9132-cb7df2f53783","Type":"ContainerStarted","Data":"da5f46c2bee9022cc73ca1433a78e88db0f72656fa6ec556672cbbd3c18cd211"}
Sep 29 09:57:34 crc kubenswrapper[4779]: I0929 09:57:34.375698 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz" podStartSLOduration=1.878473952 podStartE2EDuration="2.375665492s" podCreationTimestamp="2025-09-29 09:57:32 +0000 UTC" firstStartedPulling="2025-09-29 09:57:33.296374955 +0000 UTC m=+1685.277698859" lastFinishedPulling="2025-09-29 09:57:33.793566495 +0000 UTC m=+1685.774890399" observedRunningTime="2025-09-29 09:57:34.366440924 +0000 UTC m=+1686.347764898" watchObservedRunningTime="2025-09-29 09:57:34.375665492 +0000 UTC m=+1686.356989436"
Sep 29 09:57:37 crc kubenswrapper[4779]: I0929 09:57:37.048974 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-r5dk2"]
Sep 29 09:57:37 crc kubenswrapper[4779]: I0929 09:57:37.064463 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-r5dk2"]
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.383091 4779 generic.go:334] "Generic (PLEG): container finished" podID="172c15cd-62e0-4fe1-9132-cb7df2f53783" containerID="da5f46c2bee9022cc73ca1433a78e88db0f72656fa6ec556672cbbd3c18cd211" exitCode=0
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.383141 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz" event={"ID":"172c15cd-62e0-4fe1-9132-cb7df2f53783","Type":"ContainerDied","Data":"da5f46c2bee9022cc73ca1433a78e88db0f72656fa6ec556672cbbd3c18cd211"}
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.639238 4779 scope.go:117] "RemoveContainer" containerID="467d51c085ba826129d6035eb1d48f077434ce2a2043285efbb674394301142e"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.662506 4779 scope.go:117] "RemoveContainer" containerID="4969b95ad9ab5f47b8b94d066a3810fc308403f20352f8fcdb69eef9b6a6217d"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.703089 4779 scope.go:117] "RemoveContainer" containerID="c37f17b2b19d62939c704152ebe4158ee7cf161fd0936e7e01c819d5b8d80d32"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.725176 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ced430a8-7031-48a6-a86e-e827ef13b166" path="/var/lib/kubelet/pods/ced430a8-7031-48a6-a86e-e827ef13b166/volumes"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.748115 4779 scope.go:117] "RemoveContainer" containerID="c3cbd235fb5bfce2b581612ad9c8e12fb5b750d27620881f19a59f849c745198"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.801787 4779 scope.go:117] "RemoveContainer" containerID="b0b48659d5759491686c674715b7bcc182aa0be052f7a3aea021da7ae6591444"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.869322 4779 scope.go:117] "RemoveContainer" containerID="bd27468f74c9b9abf433abbfac322209d678e5c83e82d3f76d8091d675f36e13"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.902921 4779 scope.go:117] "RemoveContainer" containerID="655290d1944f6c2a344112d44d901a4645b3ca42747d27ba3cf78796cfc8123f"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.938828 4779 scope.go:117] "RemoveContainer" containerID="c456b6e16ea1a22b8814a3890ae26521782e99f0233c8e5adf80c3adf9c537a2"
Sep 29 09:57:38 crc kubenswrapper[4779]: I0929 09:57:38.994243 4779 scope.go:117] "RemoveContainer" containerID="37359daf67a0c5c01f804fcfbb1eb5f2a163e6762852fc72e7ed7179c701ffaa"
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.801347 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.859698 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbfvt\" (UniqueName: \"kubernetes.io/projected/172c15cd-62e0-4fe1-9132-cb7df2f53783-kube-api-access-rbfvt\") pod \"172c15cd-62e0-4fe1-9132-cb7df2f53783\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") "
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.859774 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-inventory\") pod \"172c15cd-62e0-4fe1-9132-cb7df2f53783\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") "
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.859804 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-ssh-key\") pod \"172c15cd-62e0-4fe1-9132-cb7df2f53783\" (UID: \"172c15cd-62e0-4fe1-9132-cb7df2f53783\") "
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.865923 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/172c15cd-62e0-4fe1-9132-cb7df2f53783-kube-api-access-rbfvt" (OuterVolumeSpecName: "kube-api-access-rbfvt") pod "172c15cd-62e0-4fe1-9132-cb7df2f53783" (UID: "172c15cd-62e0-4fe1-9132-cb7df2f53783"). InnerVolumeSpecName "kube-api-access-rbfvt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.888595 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "172c15cd-62e0-4fe1-9132-cb7df2f53783" (UID: "172c15cd-62e0-4fe1-9132-cb7df2f53783"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.891052 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-inventory" (OuterVolumeSpecName: "inventory") pod "172c15cd-62e0-4fe1-9132-cb7df2f53783" (UID: "172c15cd-62e0-4fe1-9132-cb7df2f53783"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.962339 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbfvt\" (UniqueName: \"kubernetes.io/projected/172c15cd-62e0-4fe1-9132-cb7df2f53783-kube-api-access-rbfvt\") on node \"crc\" DevicePath \"\""
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.962415 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 09:57:39 crc kubenswrapper[4779]: I0929 09:57:39.962428 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/172c15cd-62e0-4fe1-9132-cb7df2f53783-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.409867 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz" event={"ID":"172c15cd-62e0-4fe1-9132-cb7df2f53783","Type":"ContainerDied","Data":"a1acb8bd7e378ba84681bc686b2a933846c899f324b8e34753ba422fc2ab5e69"}
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.410205 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1acb8bd7e378ba84681bc686b2a933846c899f324b8e34753ba422fc2ab5e69"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.409968 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.474334 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"]
Sep 29 09:57:40 crc kubenswrapper[4779]: E0929 09:57:40.474745 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="172c15cd-62e0-4fe1-9132-cb7df2f53783" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.474767 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="172c15cd-62e0-4fe1-9132-cb7df2f53783" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.476641 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="172c15cd-62e0-4fe1-9132-cb7df2f53783" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.477589 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.479419 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.479542 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.480168 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.480894 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.483452 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"]
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.573577 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.573702 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.573791 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnzrr\" (UniqueName: \"kubernetes.io/projected/2be23c9e-69cc-40af-8823-6ce976a94c94-kube-api-access-gnzrr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.675841 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnzrr\" (UniqueName: \"kubernetes.io/projected/2be23c9e-69cc-40af-8823-6ce976a94c94-kube-api-access-gnzrr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.675952 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.676091 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.681542 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.682216 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.700560 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnzrr\" (UniqueName: \"kubernetes.io/projected/2be23c9e-69cc-40af-8823-6ce976a94c94-kube-api-access-gnzrr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:40 crc kubenswrapper[4779]: I0929 09:57:40.803496 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:57:41 crc kubenswrapper[4779]: I0929 09:57:41.310597 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"]
Sep 29 09:57:41 crc kubenswrapper[4779]: I0929 09:57:41.418352 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp" event={"ID":"2be23c9e-69cc-40af-8823-6ce976a94c94","Type":"ContainerStarted","Data":"9cc1843c9de8be581a82aa0c1216d9d952680eb383dd56341a1d15a734e5ea10"}
Sep 29 09:57:42 crc kubenswrapper[4779]: I0929 09:57:42.435042 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp" event={"ID":"2be23c9e-69cc-40af-8823-6ce976a94c94","Type":"ContainerStarted","Data":"48673680e28603ce1e9c3a4396dcf505769fb313966f6269ed1ea23e0b4d11cb"}
Sep 29 09:57:42 crc kubenswrapper[4779]: I0929 09:57:42.453615 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp" podStartSLOduration=1.9631924509999998 podStartE2EDuration="2.453596054s" podCreationTimestamp="2025-09-29 09:57:40 +0000 UTC" firstStartedPulling="2025-09-29 09:57:41.319692699 +0000 UTC m=+1693.301016603" lastFinishedPulling="2025-09-29 09:57:41.810096282 +0000 UTC m=+1693.791420206" observedRunningTime="2025-09-29 09:57:42.449087493 +0000 UTC m=+1694.430411407" watchObservedRunningTime="2025-09-29 09:57:42.453596054 +0000 UTC m=+1694.434919958"
Sep 29 09:57:46 crc kubenswrapper[4779]: I0929 09:57:46.966838 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 09:57:46 crc kubenswrapper[4779]: I0929 09:57:46.967472 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 09:57:46 crc kubenswrapper[4779]: I0929 09:57:46.967520 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv"
Sep 29 09:57:46 crc kubenswrapper[4779]: I0929 09:57:46.968381 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 09:57:46 crc kubenswrapper[4779]: I0929 09:57:46.968453 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" gracePeriod=600
Sep 29 09:57:47 crc kubenswrapper[4779]: E0929 09:57:47.089422 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 09:57:47 crc kubenswrapper[4779]: I0929 09:57:47.496954 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" exitCode=0
Sep 29 09:57:47 crc kubenswrapper[4779]: I0929 09:57:47.497010 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"}
Sep 29 09:57:47 crc kubenswrapper[4779]: I0929 09:57:47.497058 4779 scope.go:117] "RemoveContainer" containerID="9b76d7f97cdc0312b7d80786a3004ed45f01d356ab8c91f3656d0fd71503e713"
Sep 29 09:57:47 crc kubenswrapper[4779]: I0929 09:57:47.497696 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"
Sep 29 09:57:47 crc kubenswrapper[4779]: E0929 09:57:47.498111 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 09:58:01 crc kubenswrapper[4779]: I0929 09:58:01.713727 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"
Sep 29 09:58:01 crc kubenswrapper[4779]: E0929 09:58:01.714443 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 09:58:02 crc kubenswrapper[4779]: I0929 09:58:02.045000 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-j9cwn"]
Sep 29 09:58:02 crc kubenswrapper[4779]: I0929 09:58:02.056091 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-czmrh"]
Sep 29 09:58:02 crc kubenswrapper[4779]: I0929 09:58:02.063662 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-czmrh"]
Sep 29 09:58:02 crc kubenswrapper[4779]: I0929 09:58:02.071075 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-j9cwn"]
Sep 29 09:58:02 crc kubenswrapper[4779]: I0929 09:58:02.728617 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18171c09-ccc7-45fd-955c-b3dc2c7eff55" path="/var/lib/kubelet/pods/18171c09-ccc7-45fd-955c-b3dc2c7eff55/volumes"
Sep 29 09:58:02 crc kubenswrapper[4779]: I0929 09:58:02.730487 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f48c0b6-5c74-4587-b8b5-47e7681dd657" path="/var/lib/kubelet/pods/4f48c0b6-5c74-4587-b8b5-47e7681dd657/volumes"
Sep 29 09:58:03 crc kubenswrapper[4779]: I0929 09:58:03.036516 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-8rht4"]
Sep 29 09:58:03 crc kubenswrapper[4779]: I0929 09:58:03.049734 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-8rht4"]
Sep 29 09:58:04 crc kubenswrapper[4779]: I0929 09:58:04.734385 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9163f19-a7b2-4128-bcd9-e37cd9ff7782" path="/var/lib/kubelet/pods/a9163f19-a7b2-4128-bcd9-e37cd9ff7782/volumes"
Sep 29 09:58:12 crc kubenswrapper[4779]: I0929 09:58:12.025643 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-6336-account-create-9wth8"]
Sep 29 09:58:12 crc kubenswrapper[4779]: I0929 09:58:12.032492 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-8747-account-create-44d8p"]
Sep 29 09:58:12 crc kubenswrapper[4779]: I0929 09:58:12.039539 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-6336-account-create-9wth8"]
Sep 29 09:58:12 crc kubenswrapper[4779]: I0929 09:58:12.048623 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-8747-account-create-44d8p"]
Sep 29 09:58:12 crc kubenswrapper[4779]: I0929 09:58:12.723328 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00efaa87-60ad-4589-93fc-560b8acf0d5a" path="/var/lib/kubelet/pods/00efaa87-60ad-4589-93fc-560b8acf0d5a/volumes"
Sep 29 09:58:12 crc kubenswrapper[4779]: I0929 09:58:12.723851 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32ffe9bf-069f-4ba3-972b-abb6917e29b3" path="/var/lib/kubelet/pods/32ffe9bf-069f-4ba3-972b-abb6917e29b3/volumes"
Sep 29 09:58:13 crc kubenswrapper[4779]: I0929 09:58:13.031686 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-4152-account-create-7zn2j"]
Sep 29 09:58:13 crc kubenswrapper[4779]: I0929 09:58:13.042932 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4152-account-create-7zn2j"]
Sep 29 09:58:14 crc kubenswrapper[4779]: I0929 09:58:14.729097 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c4225e6-9b2d-4c40-a892-baa6290c8ebb" path="/var/lib/kubelet/pods/1c4225e6-9b2d-4c40-a892-baa6290c8ebb/volumes"
Sep 29 09:58:15 crc kubenswrapper[4779]: I0929 09:58:15.713752 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"
Sep 29 09:58:15 crc kubenswrapper[4779]: E0929 09:58:15.714005 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 09:58:29 crc kubenswrapper[4779]: I0929 09:58:29.714996 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"
Sep 29 09:58:29 crc kubenswrapper[4779]: E0929 09:58:29.716378 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 09:58:38 crc kubenswrapper[4779]: I0929 09:58:38.024182 4779 generic.go:334] "Generic (PLEG): container finished" podID="2be23c9e-69cc-40af-8823-6ce976a94c94" containerID="48673680e28603ce1e9c3a4396dcf505769fb313966f6269ed1ea23e0b4d11cb" exitCode=0
Sep 29 09:58:38 crc kubenswrapper[4779]: I0929 09:58:38.024263 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp" event={"ID":"2be23c9e-69cc-40af-8823-6ce976a94c94","Type":"ContainerDied","Data":"48673680e28603ce1e9c3a4396dcf505769fb313966f6269ed1ea23e0b4d11cb"}
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.056847 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n7dpw"]
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.069020 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n7dpw"]
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.157596 4779 scope.go:117] "RemoveContainer" containerID="227f8ca94312ba4ef34cc3bdddbd28879e4df36ad6a813a9b6d15ee0c9056416"
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.192852 4779 scope.go:117] "RemoveContainer" containerID="4ea8f59e35b1f420e76dc43cd662b6ac1390288dc40631948ed056bc890a5c98"
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.229989 4779 scope.go:117] "RemoveContainer" containerID="a6b35bca0bba3807d404b21d87521a4db912cccde982b52f8077fa8759559c91"
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.266620 4779 scope.go:117] "RemoveContainer" containerID="231d94dc2be4321a64c5bbef5eacb99b78cbfdfa3a05541dd1f8966f6833b212"
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.327003 4779 scope.go:117] "RemoveContainer" containerID="128377e4bc174d0c0bb389eadf07dc574fb2c01709164303e64993b254a70634"
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.372987 4779 scope.go:117] "RemoveContainer" containerID="1950c9fb9a81ebcb3bf6701a0cd72136a49bca222f173ffd526ab1e05a9d9b1a"
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.407951 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.408342 4779 scope.go:117] "RemoveContainer" containerID="159a355342623059b2a7d69cd30beedc566ffc4fa9f5341bd02c36a3289d3b85"
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.411240 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-ssh-key\") pod \"2be23c9e-69cc-40af-8823-6ce976a94c94\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") "
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.411353 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-inventory\") pod \"2be23c9e-69cc-40af-8823-6ce976a94c94\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") "
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.416501 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnzrr\" (UniqueName: \"kubernetes.io/projected/2be23c9e-69cc-40af-8823-6ce976a94c94-kube-api-access-gnzrr\") pod \"2be23c9e-69cc-40af-8823-6ce976a94c94\" (UID: \"2be23c9e-69cc-40af-8823-6ce976a94c94\") "
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.424884 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2be23c9e-69cc-40af-8823-6ce976a94c94-kube-api-access-gnzrr" (OuterVolumeSpecName: "kube-api-access-gnzrr") pod "2be23c9e-69cc-40af-8823-6ce976a94c94" (UID: "2be23c9e-69cc-40af-8823-6ce976a94c94"). InnerVolumeSpecName "kube-api-access-gnzrr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.425590 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnzrr\" (UniqueName: \"kubernetes.io/projected/2be23c9e-69cc-40af-8823-6ce976a94c94-kube-api-access-gnzrr\") on node \"crc\" DevicePath \"\""
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.450894 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-inventory" (OuterVolumeSpecName: "inventory") pod "2be23c9e-69cc-40af-8823-6ce976a94c94" (UID: "2be23c9e-69cc-40af-8823-6ce976a94c94"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.457784 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2be23c9e-69cc-40af-8823-6ce976a94c94" (UID: "2be23c9e-69cc-40af-8823-6ce976a94c94"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.527266 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 09:58:39 crc kubenswrapper[4779]: I0929 09:58:39.527315 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2be23c9e-69cc-40af-8823-6ce976a94c94-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.053053 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp" event={"ID":"2be23c9e-69cc-40af-8823-6ce976a94c94","Type":"ContainerDied","Data":"9cc1843c9de8be581a82aa0c1216d9d952680eb383dd56341a1d15a734e5ea10"}
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.053103 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cc1843c9de8be581a82aa0c1216d9d952680eb383dd56341a1d15a734e5ea10"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.053122 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.135993 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7vf2k"]
Sep 29 09:58:40 crc kubenswrapper[4779]: E0929 09:58:40.136490 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2be23c9e-69cc-40af-8823-6ce976a94c94" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.136511 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2be23c9e-69cc-40af-8823-6ce976a94c94" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.136751 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2be23c9e-69cc-40af-8823-6ce976a94c94" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.137691 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.143322 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.143415 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.143660 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.143884 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.152334 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7vf2k"]
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.241598 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.242320 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.242469 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcjk7\" (UniqueName: \"kubernetes.io/projected/20b27d7e-b6c0-4d17-9e24-2c4129089482-kube-api-access-gcjk7\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.344759 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.344815 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.344845 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcjk7\" (UniqueName: \"kubernetes.io/projected/20b27d7e-b6c0-4d17-9e24-2c4129089482-kube-api-access-gcjk7\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.349228 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.361687 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.364248 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcjk7\" (UniqueName: \"kubernetes.io/projected/20b27d7e-b6c0-4d17-9e24-2c4129089482-kube-api-access-gcjk7\") pod \"ssh-known-hosts-edpm-deployment-7vf2k\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.463366 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.736773 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8fc4bcf-88f0-4a27-97e2-e9b452a630c0" path="/var/lib/kubelet/pods/d8fc4bcf-88f0-4a27-97e2-e9b452a630c0/volumes"
Sep 29 09:58:40 crc kubenswrapper[4779]: I0929 09:58:40.977054 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7vf2k"]
Sep 29 09:58:41 crc kubenswrapper[4779]: I0929 09:58:41.063494 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k" event={"ID":"20b27d7e-b6c0-4d17-9e24-2c4129089482","Type":"ContainerStarted","Data":"1263b7c019fe4083b260c0369ec60e6b76781f346258cb32ed65120ddab7802a"}
Sep 29 09:58:42 crc kubenswrapper[4779]: I0929 09:58:42.072922 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k" event={"ID":"20b27d7e-b6c0-4d17-9e24-2c4129089482","Type":"ContainerStarted","Data":"56a824f38aa1a52efd8ad9713f0fdd4c60d89348344bb5740bb1beb650642ccd"}
Sep 29 09:58:42 crc kubenswrapper[4779]: I0929 09:58:42.089811 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k" podStartSLOduration=1.5501209889999998 podStartE2EDuration="2.089790603s" podCreationTimestamp="2025-09-29 09:58:40 +0000 UTC" firstStartedPulling="2025-09-29 09:58:40.984610514 +0000 UTC m=+1752.965934418" lastFinishedPulling="2025-09-29 09:58:41.524280088 +0000 UTC m=+1753.505604032" observedRunningTime="2025-09-29 09:58:42.086642292 +0000 UTC m=+1754.067966196" watchObservedRunningTime="2025-09-29 09:58:42.089790603 +0000 UTC m=+1754.071114507"
Sep 29 09:58:44 crc kubenswrapper[4779]: I0929 09:58:44.714754 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"
Sep 29 09:58:44 crc kubenswrapper[4779]: E0929 09:58:44.715721 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 09:58:49 crc kubenswrapper[4779]: I0929 09:58:49.126586 4779 generic.go:334] "Generic (PLEG): container finished" podID="20b27d7e-b6c0-4d17-9e24-2c4129089482" containerID="56a824f38aa1a52efd8ad9713f0fdd4c60d89348344bb5740bb1beb650642ccd" exitCode=0
Sep 29 09:58:49 crc kubenswrapper[4779]: I0929 09:58:49.126632 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k" event={"ID":"20b27d7e-b6c0-4d17-9e24-2c4129089482","Type":"ContainerDied","Data":"56a824f38aa1a52efd8ad9713f0fdd4c60d89348344bb5740bb1beb650642ccd"}
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.513841 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.557823 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-inventory-0\") pod \"20b27d7e-b6c0-4d17-9e24-2c4129089482\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") "
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.560173 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-ssh-key-openstack-edpm-ipam\") pod \"20b27d7e-b6c0-4d17-9e24-2c4129089482\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") "
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.560324 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcjk7\" (UniqueName: \"kubernetes.io/projected/20b27d7e-b6c0-4d17-9e24-2c4129089482-kube-api-access-gcjk7\") pod \"20b27d7e-b6c0-4d17-9e24-2c4129089482\" (UID: \"20b27d7e-b6c0-4d17-9e24-2c4129089482\") "
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.564262 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b27d7e-b6c0-4d17-9e24-2c4129089482-kube-api-access-gcjk7" (OuterVolumeSpecName: "kube-api-access-gcjk7") pod "20b27d7e-b6c0-4d17-9e24-2c4129089482" (UID: "20b27d7e-b6c0-4d17-9e24-2c4129089482"). InnerVolumeSpecName "kube-api-access-gcjk7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.588028 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "20b27d7e-b6c0-4d17-9e24-2c4129089482" (UID: "20b27d7e-b6c0-4d17-9e24-2c4129089482"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.588696 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "20b27d7e-b6c0-4d17-9e24-2c4129089482" (UID: "20b27d7e-b6c0-4d17-9e24-2c4129089482"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.663019 4779 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-inventory-0\") on node \"crc\" DevicePath \"\""
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.663052 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/20b27d7e-b6c0-4d17-9e24-2c4129089482-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Sep 29 09:58:50 crc kubenswrapper[4779]: I0929 09:58:50.663063 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcjk7\" (UniqueName: \"kubernetes.io/projected/20b27d7e-b6c0-4d17-9e24-2c4129089482-kube-api-access-gcjk7\") on node \"crc\" DevicePath \"\""
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.146314 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k" event={"ID":"20b27d7e-b6c0-4d17-9e24-2c4129089482","Type":"ContainerDied","Data":"1263b7c019fe4083b260c0369ec60e6b76781f346258cb32ed65120ddab7802a"}
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.146367 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7vf2k"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.146387 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1263b7c019fe4083b260c0369ec60e6b76781f346258cb32ed65120ddab7802a"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.228235 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"]
Sep 29 09:58:51 crc kubenswrapper[4779]: E0929 09:58:51.228697 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20b27d7e-b6c0-4d17-9e24-2c4129089482" containerName="ssh-known-hosts-edpm-deployment"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.228722 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="20b27d7e-b6c0-4d17-9e24-2c4129089482" containerName="ssh-known-hosts-edpm-deployment"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.228953 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="20b27d7e-b6c0-4d17-9e24-2c4129089482" containerName="ssh-known-hosts-edpm-deployment"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.229709 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.232147 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.232781 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.233154 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.233687 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.237946 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"]
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.273988 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.274072 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.274174 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzp6z\" (UniqueName: \"kubernetes.io/projected/fd59bf76-22af-4461-b023-45c86757140d-kube-api-access-mzp6z\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.376488 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzp6z\" (UniqueName: \"kubernetes.io/projected/fd59bf76-22af-4461-b023-45c86757140d-kube-api-access-mzp6z\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.376583 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.376652 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.380452 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.389398 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.392048 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzp6z\" (UniqueName: \"kubernetes.io/projected/fd59bf76-22af-4461-b023-45c86757140d-kube-api-access-mzp6z\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5v4bz\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:51 crc kubenswrapper[4779]: I0929 09:58:51.546397 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"
Sep 29 09:58:52 crc kubenswrapper[4779]: I0929 09:58:52.098111 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"]
Sep 29 09:58:52 crc kubenswrapper[4779]: I0929 09:58:52.154507 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz" event={"ID":"fd59bf76-22af-4461-b023-45c86757140d","Type":"ContainerStarted","Data":"feabef81e66bd4d6e1759a1dff168e2ffd6430973e802f218cf35301b12a7fa7"}
Sep 29 09:58:53 crc kubenswrapper[4779]: I0929 09:58:53.165596 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz" event={"ID":"fd59bf76-22af-4461-b023-45c86757140d","Type":"ContainerStarted","Data":"39580d875f8ba9d5e7015fdd0ed0b6e17ac80582283868151765aaec01589500"}
Sep 29 09:58:53 crc kubenswrapper[4779]: I0929 09:58:53.191319 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz" podStartSLOduration=1.688282262 podStartE2EDuration="2.191300611s" podCreationTimestamp="2025-09-29 09:58:51 +0000 UTC" firstStartedPulling="2025-09-29 09:58:52.1046288 +0000 UTC m=+1764.085952714" lastFinishedPulling="2025-09-29 09:58:52.607647159 +0000 UTC m=+1764.588971063" observedRunningTime="2025-09-29 09:58:53.184522724 +0000 UTC m=+1765.165846628" watchObservedRunningTime="2025-09-29 09:58:53.191300611 +0000 UTC m=+1765.172624515"
Sep 29 09:58:55 crc kubenswrapper[4779]: I0929 09:58:55.714531 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97"
Sep 29 09:58:55 crc kubenswrapper[4779]: E0929 09:58:55.715248 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 09:59:02 crc kubenswrapper[4779]: I0929 09:59:02.043594 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-p6zkw"]
Sep 29 09:59:02 crc kubenswrapper[4779]: I0929 09:59:02.052874 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-p6zkw"]
Sep 29 09:59:02 crc kubenswrapper[4779]: I0929 09:59:02.724869 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db25293b-28ae-40dc-a75f-86de34677919" path="/var/lib/kubelet/pods/db25293b-28ae-40dc-a75f-86de34677919/volumes"
Sep 29 09:59:03 crc kubenswrapper[4779]: I0929 09:59:03.028449 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6s6xf"]
Sep 29 09:59:03 crc kubenswrapper[4779]: I0929 09:59:03.043981 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6s6xf"]
Sep 29 09:59:03 crc kubenswrapper[4779]: I0929 09:59:03.265836 4779 generic.go:334] "Generic (PLEG): container finished" podID="fd59bf76-22af-4461-b023-45c86757140d" containerID="39580d875f8ba9d5e7015fdd0ed0b6e17ac80582283868151765aaec01589500" exitCode=0
Sep 29 09:59:03 crc kubenswrapper[4779]: I0929 09:59:03.265890 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz" event={"ID":"fd59bf76-22af-4461-b023-45c86757140d","Type":"ContainerDied","Data":"39580d875f8ba9d5e7015fdd0ed0b6e17ac80582283868151765aaec01589500"}
Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.681092 4779 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz" Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.721546 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-inventory\") pod \"fd59bf76-22af-4461-b023-45c86757140d\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.721597 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-ssh-key\") pod \"fd59bf76-22af-4461-b023-45c86757140d\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.721765 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzp6z\" (UniqueName: \"kubernetes.io/projected/fd59bf76-22af-4461-b023-45c86757140d-kube-api-access-mzp6z\") pod \"fd59bf76-22af-4461-b023-45c86757140d\" (UID: \"fd59bf76-22af-4461-b023-45c86757140d\") " Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.727751 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32f92928-6ba4-4824-94ac-8e20efe26e67" path="/var/lib/kubelet/pods/32f92928-6ba4-4824-94ac-8e20efe26e67/volumes" Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.732057 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd59bf76-22af-4461-b023-45c86757140d-kube-api-access-mzp6z" (OuterVolumeSpecName: "kube-api-access-mzp6z") pod "fd59bf76-22af-4461-b023-45c86757140d" (UID: "fd59bf76-22af-4461-b023-45c86757140d"). InnerVolumeSpecName "kube-api-access-mzp6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.755180 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-inventory" (OuterVolumeSpecName: "inventory") pod "fd59bf76-22af-4461-b023-45c86757140d" (UID: "fd59bf76-22af-4461-b023-45c86757140d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.764170 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fd59bf76-22af-4461-b023-45c86757140d" (UID: "fd59bf76-22af-4461-b023-45c86757140d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.824926 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzp6z\" (UniqueName: \"kubernetes.io/projected/fd59bf76-22af-4461-b023-45c86757140d-kube-api-access-mzp6z\") on node \"crc\" DevicePath \"\"" Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.824962 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 09:59:04 crc kubenswrapper[4779]: I0929 09:59:04.824974 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd59bf76-22af-4461-b023-45c86757140d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.285262 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz" event={"ID":"fd59bf76-22af-4461-b023-45c86757140d","Type":"ContainerDied","Data":"feabef81e66bd4d6e1759a1dff168e2ffd6430973e802f218cf35301b12a7fa7"} Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.285298 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="feabef81e66bd4d6e1759a1dff168e2ffd6430973e802f218cf35301b12a7fa7" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.285365 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.394585 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz"] Sep 29 09:59:05 crc kubenswrapper[4779]: E0929 09:59:05.394985 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd59bf76-22af-4461-b023-45c86757140d" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.395006 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd59bf76-22af-4461-b023-45c86757140d" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.395230 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd59bf76-22af-4461-b023-45c86757140d" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.395961 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.397919 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.398344 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.399069 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.404564 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.436341 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.436387 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd8v6\" (UniqueName: \"kubernetes.io/projected/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-kube-api-access-dd8v6\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.436428 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.454793 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz"] Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.537401 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.537555 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.537576 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd8v6\" (UniqueName: \"kubernetes.io/projected/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-kube-api-access-dd8v6\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: 
\"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.540874 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.541364 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.556090 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd8v6\" (UniqueName: \"kubernetes.io/projected/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-kube-api-access-dd8v6\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:05 crc kubenswrapper[4779]: I0929 09:59:05.711233 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:06 crc kubenswrapper[4779]: I0929 09:59:06.278858 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz"] Sep 29 09:59:06 crc kubenswrapper[4779]: W0929 09:59:06.284999 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4c6aca3_1090_4ea5_b3ed_04c11ac7c455.slice/crio-0966b1d1d5250d200b85f226dcacfb6668bde977bf01fe1774e6593499c7d2c0 WatchSource:0}: Error finding container 0966b1d1d5250d200b85f226dcacfb6668bde977bf01fe1774e6593499c7d2c0: Status 404 returned error can't find the container with id 0966b1d1d5250d200b85f226dcacfb6668bde977bf01fe1774e6593499c7d2c0 Sep 29 09:59:06 crc kubenswrapper[4779]: I0929 09:59:06.294792 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" event={"ID":"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455","Type":"ContainerStarted","Data":"0966b1d1d5250d200b85f226dcacfb6668bde977bf01fe1774e6593499c7d2c0"} Sep 29 09:59:07 crc kubenswrapper[4779]: I0929 09:59:07.305134 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" event={"ID":"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455","Type":"ContainerStarted","Data":"e259da7b53f71a5e012ae25a260864a5614b1d8d26dc67744da478a709c30584"} Sep 29 09:59:07 crc kubenswrapper[4779]: I0929 09:59:07.333933 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" podStartSLOduration=1.917616354 podStartE2EDuration="2.333877462s" podCreationTimestamp="2025-09-29 09:59:05 +0000 UTC" firstStartedPulling="2025-09-29 09:59:06.288144791 +0000 UTC m=+1778.269468695" lastFinishedPulling="2025-09-29 09:59:06.704405899 +0000 UTC m=+1778.685729803" observedRunningTime="2025-09-29 09:59:07.32588695 +0000 UTC m=+1779.307210864" 
watchObservedRunningTime="2025-09-29 09:59:07.333877462 +0000 UTC m=+1779.315201406" Sep 29 09:59:09 crc kubenswrapper[4779]: I0929 09:59:09.713917 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 09:59:09 crc kubenswrapper[4779]: E0929 09:59:09.714484 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 09:59:17 crc kubenswrapper[4779]: I0929 09:59:17.387266 4779 generic.go:334] "Generic (PLEG): container finished" podID="d4c6aca3-1090-4ea5-b3ed-04c11ac7c455" containerID="e259da7b53f71a5e012ae25a260864a5614b1d8d26dc67744da478a709c30584" exitCode=0 Sep 29 09:59:17 crc kubenswrapper[4779]: I0929 09:59:17.387391 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" event={"ID":"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455","Type":"ContainerDied","Data":"e259da7b53f71a5e012ae25a260864a5614b1d8d26dc67744da478a709c30584"} Sep 29 09:59:18 crc kubenswrapper[4779]: I0929 09:59:18.916170 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.087500 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-ssh-key\") pod \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.087573 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dd8v6\" (UniqueName: \"kubernetes.io/projected/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-kube-api-access-dd8v6\") pod \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.087643 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-inventory\") pod \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\" (UID: \"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455\") " Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.093358 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-kube-api-access-dd8v6" (OuterVolumeSpecName: "kube-api-access-dd8v6") pod "d4c6aca3-1090-4ea5-b3ed-04c11ac7c455" (UID: "d4c6aca3-1090-4ea5-b3ed-04c11ac7c455"). InnerVolumeSpecName "kube-api-access-dd8v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.115053 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-inventory" (OuterVolumeSpecName: "inventory") pod "d4c6aca3-1090-4ea5-b3ed-04c11ac7c455" (UID: "d4c6aca3-1090-4ea5-b3ed-04c11ac7c455"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.115074 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d4c6aca3-1090-4ea5-b3ed-04c11ac7c455" (UID: "d4c6aca3-1090-4ea5-b3ed-04c11ac7c455"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.190994 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.191029 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dd8v6\" (UniqueName: \"kubernetes.io/projected/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-kube-api-access-dd8v6\") on node \"crc\" DevicePath \"\"" Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.191046 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.424359 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" event={"ID":"d4c6aca3-1090-4ea5-b3ed-04c11ac7c455","Type":"ContainerDied","Data":"0966b1d1d5250d200b85f226dcacfb6668bde977bf01fe1774e6593499c7d2c0"} Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.424427 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0966b1d1d5250d200b85f226dcacfb6668bde977bf01fe1774e6593499c7d2c0" Sep 29 09:59:19 crc kubenswrapper[4779]: I0929 09:59:19.424451 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz" Sep 29 09:59:24 crc kubenswrapper[4779]: I0929 09:59:24.714444 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 09:59:24 crc kubenswrapper[4779]: E0929 09:59:24.715231 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 09:59:35 crc kubenswrapper[4779]: I0929 09:59:35.714927 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 09:59:35 crc kubenswrapper[4779]: E0929 09:59:35.716162 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 09:59:39 crc kubenswrapper[4779]: I0929 09:59:39.599282 4779 scope.go:117] "RemoveContainer" containerID="8b8b9cc7488074b98f47def0550430ef1ae160698979702264ad74e926ac6fe0" Sep 29 09:59:39 crc kubenswrapper[4779]: I0929 09:59:39.650245 4779 scope.go:117] "RemoveContainer" containerID="42175e97ef30e091bd98a7f085d98538d528664c6d59cf720074da433264fe75" Sep 29 09:59:39 crc kubenswrapper[4779]: I0929 09:59:39.725491 4779 scope.go:117] "RemoveContainer" containerID="c8f3399cd4b08d51e0415f62410d0e9b9bd65963ad041a47277217410d338f13" Sep 29 09:59:46 crc kubenswrapper[4779]: I0929 09:59:46.039047 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-2hnns"] Sep 29 09:59:46 crc kubenswrapper[4779]: I0929 09:59:46.047375 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-2hnns"] Sep 29 09:59:46 crc kubenswrapper[4779]: I0929 09:59:46.728838 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="152ec30c-7a74-4fcd-a062-52f60d17756e" path="/var/lib/kubelet/pods/152ec30c-7a74-4fcd-a062-52f60d17756e/volumes" Sep 29 09:59:49 crc kubenswrapper[4779]: I0929 09:59:49.713705 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 09:59:49 crc kubenswrapper[4779]: E0929 09:59:49.714757 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.153181 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm"] Sep 29 10:00:00 crc kubenswrapper[4779]: E0929 10:00:00.154215 4779 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d4c6aca3-1090-4ea5-b3ed-04c11ac7c455" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.154235 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c6aca3-1090-4ea5-b3ed-04c11ac7c455" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.154454 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4c6aca3-1090-4ea5-b3ed-04c11ac7c455" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.155288 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.157987 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.158144 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.162097 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm"] Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.313176 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54mdf\" (UniqueName: \"kubernetes.io/projected/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-kube-api-access-54mdf\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.313234 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-config-volume\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.313309 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-secret-volume\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.415860 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54mdf\" (UniqueName: \"kubernetes.io/projected/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-kube-api-access-54mdf\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.415995 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-config-volume\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.416105 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-secret-volume\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.417571 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-config-volume\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.423338 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-secret-volume\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.440706 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54mdf\" (UniqueName: \"kubernetes.io/projected/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-kube-api-access-54mdf\") pod \"collect-profiles-29319000-q2qmm\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.472693 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:00 crc kubenswrapper[4779]: I0929 10:00:00.958323 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm"] Sep 29 10:00:01 crc kubenswrapper[4779]: I0929 10:00:01.864231 4779 generic.go:334] "Generic (PLEG): container finished" podID="7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35" containerID="e69ecf402440ab6cd373a336f634be27c89048d268149f96ecc0daddce5dcc86" exitCode=0 Sep 29 10:00:01 crc kubenswrapper[4779]: I0929 10:00:01.864327 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" event={"ID":"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35","Type":"ContainerDied","Data":"e69ecf402440ab6cd373a336f634be27c89048d268149f96ecc0daddce5dcc86"} Sep 29 10:00:01 crc kubenswrapper[4779]: I0929 10:00:01.865245 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" event={"ID":"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35","Type":"ContainerStarted","Data":"1c786c70d203cf9510c8217fa1779758bfb4effa970c4fe590a1c7234366f239"} Sep 29 10:00:02 crc kubenswrapper[4779]: I0929 10:00:02.715222 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:00:02 crc kubenswrapper[4779]: E0929 10:00:02.715605 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.346274 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.493601 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-config-volume\") pod \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.493878 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54mdf\" (UniqueName: \"kubernetes.io/projected/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-kube-api-access-54mdf\") pod \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.493901 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-secret-volume\") pod \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\" (UID: \"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35\") " Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.494778 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-config-volume" (OuterVolumeSpecName: "config-volume") pod "7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35" (UID: "7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.499344 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35" (UID: "7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.499561 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-kube-api-access-54mdf" (OuterVolumeSpecName: "kube-api-access-54mdf") pod "7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35" (UID: "7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35"). InnerVolumeSpecName "kube-api-access-54mdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.596936 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54mdf\" (UniqueName: \"kubernetes.io/projected/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-kube-api-access-54mdf\") on node \"crc\" DevicePath \"\"" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.597018 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.597032 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.886030 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" event={"ID":"7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35","Type":"ContainerDied","Data":"1c786c70d203cf9510c8217fa1779758bfb4effa970c4fe590a1c7234366f239"} Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.886067 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c786c70d203cf9510c8217fa1779758bfb4effa970c4fe590a1c7234366f239" Sep 29 10:00:03 crc kubenswrapper[4779]: I0929 10:00:03.886130 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm" Sep 29 10:00:14 crc kubenswrapper[4779]: I0929 10:00:14.715461 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:00:14 crc kubenswrapper[4779]: E0929 10:00:14.716646 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:00:25 crc kubenswrapper[4779]: I0929 10:00:25.717572 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:00:25 crc kubenswrapper[4779]: E0929 10:00:25.719246 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:00:36 crc kubenswrapper[4779]: I0929 10:00:36.715128 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:00:36 crc kubenswrapper[4779]: E0929 10:00:36.715846 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:00:39 crc kubenswrapper[4779]: I0929 10:00:39.833417 4779 scope.go:117] "RemoveContainer" containerID="58a0cc1a64a0c547a70655d8d911d26f7464b3add82dc9910d0a2a9de01e72d5" Sep 29 10:00:49 crc kubenswrapper[4779]: I0929 10:00:49.717526 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:00:49 crc kubenswrapper[4779]: E0929 10:00:49.718515 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.147934 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319001-ssfsw"] Sep 29 10:01:00 crc kubenswrapper[4779]: E0929 10:01:00.149099 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35" containerName="collect-profiles" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.149117 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35" containerName="collect-profiles" Sep 29 10:01:00 crc 
kubenswrapper[4779]: I0929 10:01:00.149333 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35" containerName="collect-profiles" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.150138 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.160579 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319001-ssfsw"] Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.241375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-fernet-keys\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.241439 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8k77\" (UniqueName: \"kubernetes.io/projected/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-kube-api-access-l8k77\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.241577 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-config-data\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.241641 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-combined-ca-bundle\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.343968 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-combined-ca-bundle\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.344293 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-fernet-keys\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.344333 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8k77\" (UniqueName: \"kubernetes.io/projected/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-kube-api-access-l8k77\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.344444 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-config-data\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.349996 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-fernet-keys\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.351819 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-combined-ca-bundle\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.361416 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8k77\" (UniqueName: \"kubernetes.io/projected/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-kube-api-access-l8k77\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.362823 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-config-data\") pod \"keystone-cron-29319001-ssfsw\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.474520 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:00 crc kubenswrapper[4779]: I0929 10:01:00.894759 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319001-ssfsw"] Sep 29 10:01:01 crc kubenswrapper[4779]: I0929 10:01:01.444827 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319001-ssfsw" event={"ID":"cc77a3e0-fac1-4945-a706-d4d9fc0a209f","Type":"ContainerStarted","Data":"6307336962e3fc5c0d6aa9f7dd0ec59aea1a2f802236f3d94df7fde775891759"} Sep 29 10:01:01 crc kubenswrapper[4779]: I0929 10:01:01.445176 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319001-ssfsw" event={"ID":"cc77a3e0-fac1-4945-a706-d4d9fc0a209f","Type":"ContainerStarted","Data":"db494d080629a9288a89323ae83f6c15dd058e8ee9f3d30d9685c514f4f6d1fe"} Sep 29 10:01:01 crc kubenswrapper[4779]: I0929 10:01:01.471204 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319001-ssfsw" podStartSLOduration=1.471180547 podStartE2EDuration="1.471180547s" podCreationTimestamp="2025-09-29 10:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 10:01:01.460887628 +0000 UTC m=+1893.442211532" watchObservedRunningTime="2025-09-29 10:01:01.471180547 +0000 UTC m=+1893.452504451" Sep 29 10:01:03 crc kubenswrapper[4779]: I0929 10:01:03.714658 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:01:03 crc kubenswrapper[4779]: E0929 10:01:03.715159 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:01:04 crc kubenswrapper[4779]: I0929 10:01:04.470080 4779 generic.go:334] "Generic (PLEG): container finished" podID="cc77a3e0-fac1-4945-a706-d4d9fc0a209f" containerID="6307336962e3fc5c0d6aa9f7dd0ec59aea1a2f802236f3d94df7fde775891759" exitCode=0 Sep 29 10:01:04 crc kubenswrapper[4779]: I0929 10:01:04.470182 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319001-ssfsw" event={"ID":"cc77a3e0-fac1-4945-a706-d4d9fc0a209f","Type":"ContainerDied","Data":"6307336962e3fc5c0d6aa9f7dd0ec59aea1a2f802236f3d94df7fde775891759"} Sep 29 10:01:05 crc kubenswrapper[4779]: I0929 10:01:05.814142 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:05 crc kubenswrapper[4779]: I0929 10:01:05.955589 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-combined-ca-bundle\") pod \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " Sep 29 10:01:05 crc kubenswrapper[4779]: I0929 10:01:05.955727 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-fernet-keys\") pod \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " Sep 29 10:01:05 crc kubenswrapper[4779]: I0929 10:01:05.955763 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8k77\" (UniqueName: \"kubernetes.io/projected/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-kube-api-access-l8k77\") pod \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " Sep 29 10:01:05 crc kubenswrapper[4779]: I0929 10:01:05.955779 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-config-data\") pod \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\" (UID: \"cc77a3e0-fac1-4945-a706-d4d9fc0a209f\") " Sep 29 10:01:05 crc kubenswrapper[4779]: I0929 10:01:05.961589 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cc77a3e0-fac1-4945-a706-d4d9fc0a209f" (UID: "cc77a3e0-fac1-4945-a706-d4d9fc0a209f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:01:05 crc kubenswrapper[4779]: I0929 10:01:05.961762 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-kube-api-access-l8k77" (OuterVolumeSpecName: "kube-api-access-l8k77") pod "cc77a3e0-fac1-4945-a706-d4d9fc0a209f" (UID: "cc77a3e0-fac1-4945-a706-d4d9fc0a209f"). InnerVolumeSpecName "kube-api-access-l8k77". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:01:05 crc kubenswrapper[4779]: I0929 10:01:05.984316 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc77a3e0-fac1-4945-a706-d4d9fc0a209f" (UID: "cc77a3e0-fac1-4945-a706-d4d9fc0a209f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:01:06 crc kubenswrapper[4779]: I0929 10:01:06.017139 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-config-data" (OuterVolumeSpecName: "config-data") pod "cc77a3e0-fac1-4945-a706-d4d9fc0a209f" (UID: "cc77a3e0-fac1-4945-a706-d4d9fc0a209f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:01:06 crc kubenswrapper[4779]: I0929 10:01:06.058611 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:01:06 crc kubenswrapper[4779]: I0929 10:01:06.058657 4779 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 10:01:06 crc kubenswrapper[4779]: I0929 10:01:06.058671 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 10:01:06 crc kubenswrapper[4779]: I0929 10:01:06.058687 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8k77\" (UniqueName: \"kubernetes.io/projected/cc77a3e0-fac1-4945-a706-d4d9fc0a209f-kube-api-access-l8k77\") on node \"crc\" DevicePath \"\"" Sep 29 10:01:06 crc kubenswrapper[4779]: I0929 10:01:06.490938 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319001-ssfsw" event={"ID":"cc77a3e0-fac1-4945-a706-d4d9fc0a209f","Type":"ContainerDied","Data":"db494d080629a9288a89323ae83f6c15dd058e8ee9f3d30d9685c514f4f6d1fe"} Sep 29 10:01:06 crc kubenswrapper[4779]: I0929 10:01:06.490980 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db494d080629a9288a89323ae83f6c15dd058e8ee9f3d30d9685c514f4f6d1fe" Sep 29 10:01:06 crc kubenswrapper[4779]: I0929 10:01:06.491011 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319001-ssfsw" Sep 29 10:01:17 crc kubenswrapper[4779]: I0929 10:01:17.715037 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:01:17 crc kubenswrapper[4779]: E0929 10:01:17.716079 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:01:30 crc kubenswrapper[4779]: I0929 10:01:30.715505 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:01:30 crc kubenswrapper[4779]: E0929 10:01:30.716561 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:01:45 crc kubenswrapper[4779]: I0929 10:01:45.715392 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:01:45 crc kubenswrapper[4779]: E0929 10:01:45.716117 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:01:59 crc kubenswrapper[4779]: I0929 10:01:59.714147 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:01:59 crc kubenswrapper[4779]: E0929 10:01:59.715247 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:02:13 crc kubenswrapper[4779]: I0929 10:02:13.714922 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:02:13 crc kubenswrapper[4779]: E0929 10:02:13.715777 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:02:25 crc kubenswrapper[4779]: I0929 10:02:25.715479 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:02:25 crc kubenswrapper[4779]: E0929 10:02:25.716651 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:02:39 crc kubenswrapper[4779]: I0929 10:02:39.714621 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:02:39 crc kubenswrapper[4779]: E0929 10:02:39.715401 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:02:53 crc kubenswrapper[4779]: I0929 10:02:53.714554 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:02:54 crc kubenswrapper[4779]: I0929 10:02:54.561810 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"2ea7c57f54acd206248deed2c0656374ecfa2983e6d9220e44a227622fb5b4f8"} Sep 29 10:03:51 crc 
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.042445 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vssrx"]
Sep 29 10:03:51 crc kubenswrapper[4779]: E0929 10:03:51.043795 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc77a3e0-fac1-4945-a706-d4d9fc0a209f" containerName="keystone-cron"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.043821 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc77a3e0-fac1-4945-a706-d4d9fc0a209f" containerName="keystone-cron"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.044279 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc77a3e0-fac1-4945-a706-d4d9fc0a209f" containerName="keystone-cron"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.046648 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.055840 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vssrx"]
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.155777 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2klpf\" (UniqueName: \"kubernetes.io/projected/9c124885-d64e-4e93-ba92-0f409e1a9c51-kube-api-access-2klpf\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.155824 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-utilities\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.156085 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-catalog-content\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.258231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2klpf\" (UniqueName: \"kubernetes.io/projected/9c124885-d64e-4e93-ba92-0f409e1a9c51-kube-api-access-2klpf\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.258274 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-utilities\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.258354 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-catalog-content\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.258763 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-catalog-content\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.258896 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-utilities\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.277022 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2klpf\" (UniqueName: \"kubernetes.io/projected/9c124885-d64e-4e93-ba92-0f409e1a9c51-kube-api-access-2klpf\") pod \"community-operators-vssrx\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.376438 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vssrx"
Sep 29 10:03:51 crc kubenswrapper[4779]: I0929 10:03:51.844542 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vssrx"]
Sep 29 10:03:52 crc kubenswrapper[4779]: I0929 10:03:52.125111 4779 generic.go:334] "Generic (PLEG): container finished" podID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerID="8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4" exitCode=0
Sep 29 10:03:52 crc kubenswrapper[4779]: I0929 10:03:52.125156 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vssrx" event={"ID":"9c124885-d64e-4e93-ba92-0f409e1a9c51","Type":"ContainerDied","Data":"8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4"}
Sep 29 10:03:52 crc kubenswrapper[4779]: I0929 10:03:52.125405 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vssrx" event={"ID":"9c124885-d64e-4e93-ba92-0f409e1a9c51","Type":"ContainerStarted","Data":"4b2030ebda76340aca098654679e110eafccaad4ce8258048b679f1001b24e0a"}
Sep 29 10:03:52 crc kubenswrapper[4779]: I0929 10:03:52.127370 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 10:03:53 crc kubenswrapper[4779]: I0929 10:03:53.143885 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vssrx" event={"ID":"9c124885-d64e-4e93-ba92-0f409e1a9c51","Type":"ContainerStarted","Data":"7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba"}
Sep 29 10:03:54 crc kubenswrapper[4779]: I0929 10:03:54.152995 4779 generic.go:334] "Generic (PLEG): container finished" podID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerID="7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba" exitCode=0
Sep 29 10:03:54 crc kubenswrapper[4779]: I0929 10:03:54.153764 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vssrx" event={"ID":"9c124885-d64e-4e93-ba92-0f409e1a9c51","Type":"ContainerDied","Data":"7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba"}
Sep 29 10:03:55 crc kubenswrapper[4779]: I0929 10:03:55.163246 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vssrx" event={"ID":"9c124885-d64e-4e93-ba92-0f409e1a9c51","Type":"ContainerStarted","Data":"a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0"}
Sep 29 10:03:55 crc kubenswrapper[4779]: I0929 10:03:55.183124 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vssrx" podStartSLOduration=1.657800328 podStartE2EDuration="4.183102665s" podCreationTimestamp="2025-09-29 10:03:51 +0000 UTC" firstStartedPulling="2025-09-29 10:03:52.127096497 +0000 UTC m=+2064.108420401" lastFinishedPulling="2025-09-29 10:03:54.652398834 +0000 UTC m=+2066.633722738" observedRunningTime="2025-09-29 10:03:55.178575063 +0000 UTC m=+2067.159898967" watchObservedRunningTime="2025-09-29 10:03:55.183102665 +0000 UTC m=+2067.164426559"
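The pod_startup_latency_tracker entry above carries enough timestamps to check its own arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). That relationship is inferred from the numbers in this log, not taken from kubelet source; a minimal Go sketch reproducing the community-operators-vssrx values:

package main

import (
	"fmt"
	"time"
)

// Reproduces the startup-latency arithmetic from the log entry above.
// The layout matches Go's default time.Time formatting, which is what
// the kubelet prints for these fields.
func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-09-29 10:03:51 +0000 UTC")
	firstPull := parse("2025-09-29 10:03:52.127096497 +0000 UTC")
	lastPull := parse("2025-09-29 10:03:54.652398834 +0000 UTC")
	running := parse("2025-09-29 10:03:55.183102665 +0000 UTC")

	e2e := running.Sub(created)          // podStartE2EDuration: 4.183102665s
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: 1.657800328s
	fmt.Println("E2E:", e2e, "SLO:", slo)
}

Run against these four timestamps, the output matches the logged podStartE2EDuration and podStartSLOduration exactly, which supports the inferred formula.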
"operationExecutor.MountVolume started for volume \"kube-api-access-4n957\" (UniqueName: \"kubernetes.io/projected/c0614dfd-131c-44e7-a374-63aaa909b326-kube-api-access-4n957\") pod \"redhat-operators-pmfgn\" (UID: \"c0614dfd-131c-44e7-a374-63aaa909b326\") " pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:03:58 crc kubenswrapper[4779]: I0929 10:03:58.686135 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0614dfd-131c-44e7-a374-63aaa909b326-utilities\") pod \"redhat-operators-pmfgn\" (UID: \"c0614dfd-131c-44e7-a374-63aaa909b326\") " pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:03:58 crc kubenswrapper[4779]: I0929 10:03:58.686408 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0614dfd-131c-44e7-a374-63aaa909b326-catalog-content\") pod \"redhat-operators-pmfgn\" (UID: \"c0614dfd-131c-44e7-a374-63aaa909b326\") " pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:03:58 crc kubenswrapper[4779]: I0929 10:03:58.707740 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n957\" (UniqueName: \"kubernetes.io/projected/c0614dfd-131c-44e7-a374-63aaa909b326-kube-api-access-4n957\") pod \"redhat-operators-pmfgn\" (UID: \"c0614dfd-131c-44e7-a374-63aaa909b326\") " pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:03:58 crc kubenswrapper[4779]: I0929 10:03:58.864237 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:03:59 crc kubenswrapper[4779]: I0929 10:03:59.349554 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pmfgn"] Sep 29 10:04:00 crc kubenswrapper[4779]: I0929 10:04:00.205814 4779 generic.go:334] "Generic (PLEG): container finished" podID="c0614dfd-131c-44e7-a374-63aaa909b326" containerID="84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad" exitCode=0 Sep 29 10:04:00 crc kubenswrapper[4779]: I0929 10:04:00.205873 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pmfgn" event={"ID":"c0614dfd-131c-44e7-a374-63aaa909b326","Type":"ContainerDied","Data":"84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad"} Sep 29 10:04:00 crc kubenswrapper[4779]: I0929 10:04:00.206203 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pmfgn" event={"ID":"c0614dfd-131c-44e7-a374-63aaa909b326","Type":"ContainerStarted","Data":"cd0271cb99ae7269ef8ceb836a2467955734461119874784af2f9ac20512e79d"} Sep 29 10:04:01 crc kubenswrapper[4779]: I0929 10:04:01.377214 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vssrx" Sep 29 10:04:01 crc kubenswrapper[4779]: I0929 10:04:01.377268 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vssrx" Sep 29 10:04:01 crc kubenswrapper[4779]: I0929 10:04:01.419427 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vssrx" Sep 29 10:04:02 crc kubenswrapper[4779]: I0929 10:04:02.226217 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pmfgn" 
event={"ID":"c0614dfd-131c-44e7-a374-63aaa909b326","Type":"ContainerStarted","Data":"d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f"} Sep 29 10:04:02 crc kubenswrapper[4779]: I0929 10:04:02.281818 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vssrx" Sep 29 10:04:03 crc kubenswrapper[4779]: I0929 10:04:03.236168 4779 generic.go:334] "Generic (PLEG): container finished" podID="c0614dfd-131c-44e7-a374-63aaa909b326" containerID="d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f" exitCode=0 Sep 29 10:04:03 crc kubenswrapper[4779]: I0929 10:04:03.236213 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pmfgn" event={"ID":"c0614dfd-131c-44e7-a374-63aaa909b326","Type":"ContainerDied","Data":"d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f"} Sep 29 10:04:03 crc kubenswrapper[4779]: I0929 10:04:03.724071 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vssrx"] Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.247668 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pmfgn" event={"ID":"c0614dfd-131c-44e7-a374-63aaa909b326","Type":"ContainerStarted","Data":"1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992"} Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.247781 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vssrx" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerName="registry-server" containerID="cri-o://a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0" gracePeriod=2 Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.269747 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pmfgn" podStartSLOduration=2.85638458 podStartE2EDuration="6.269731054s" podCreationTimestamp="2025-09-29 10:03:58 +0000 UTC" firstStartedPulling="2025-09-29 10:04:00.209049535 +0000 UTC m=+2072.190373429" lastFinishedPulling="2025-09-29 10:04:03.622395999 +0000 UTC m=+2075.603719903" observedRunningTime="2025-09-29 10:04:04.268832718 +0000 UTC m=+2076.250156632" watchObservedRunningTime="2025-09-29 10:04:04.269731054 +0000 UTC m=+2076.251054958" Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.736373 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vssrx" Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.907050 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-catalog-content\") pod \"9c124885-d64e-4e93-ba92-0f409e1a9c51\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.907115 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-utilities\") pod \"9c124885-d64e-4e93-ba92-0f409e1a9c51\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.907144 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2klpf\" (UniqueName: \"kubernetes.io/projected/9c124885-d64e-4e93-ba92-0f409e1a9c51-kube-api-access-2klpf\") pod \"9c124885-d64e-4e93-ba92-0f409e1a9c51\" (UID: \"9c124885-d64e-4e93-ba92-0f409e1a9c51\") " Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.908140 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-utilities" (OuterVolumeSpecName: "utilities") pod "9c124885-d64e-4e93-ba92-0f409e1a9c51" (UID: "9c124885-d64e-4e93-ba92-0f409e1a9c51"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.916161 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c124885-d64e-4e93-ba92-0f409e1a9c51-kube-api-access-2klpf" (OuterVolumeSpecName: "kube-api-access-2klpf") pod "9c124885-d64e-4e93-ba92-0f409e1a9c51" (UID: "9c124885-d64e-4e93-ba92-0f409e1a9c51"). InnerVolumeSpecName "kube-api-access-2klpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:04:04 crc kubenswrapper[4779]: I0929 10:04:04.948136 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c124885-d64e-4e93-ba92-0f409e1a9c51" (UID: "9c124885-d64e-4e93-ba92-0f409e1a9c51"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.010105 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.010189 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c124885-d64e-4e93-ba92-0f409e1a9c51-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.010212 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2klpf\" (UniqueName: \"kubernetes.io/projected/9c124885-d64e-4e93-ba92-0f409e1a9c51-kube-api-access-2klpf\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.257100 4779 generic.go:334] "Generic (PLEG): container finished" podID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerID="a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0" exitCode=0 Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.257190 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vssrx" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.257176 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vssrx" event={"ID":"9c124885-d64e-4e93-ba92-0f409e1a9c51","Type":"ContainerDied","Data":"a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0"} Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.257966 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vssrx" event={"ID":"9c124885-d64e-4e93-ba92-0f409e1a9c51","Type":"ContainerDied","Data":"4b2030ebda76340aca098654679e110eafccaad4ce8258048b679f1001b24e0a"} Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.257999 4779 scope.go:117] "RemoveContainer" containerID="a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.280540 4779 scope.go:117] "RemoveContainer" containerID="7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.289320 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vssrx"] Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.303455 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vssrx"] Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.311214 4779 scope.go:117] "RemoveContainer" containerID="8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.360454 4779 scope.go:117] "RemoveContainer" containerID="a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0" Sep 29 10:04:05 crc kubenswrapper[4779]: E0929 10:04:05.360850 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0\": container with ID starting with a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0 not found: ID does not exist" containerID="a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.360890 
4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0"} err="failed to get container status \"a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0\": rpc error: code = NotFound desc = could not find container \"a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0\": container with ID starting with a6aa65e48cfa7053aee8f881202d3baa6a3cc8fb01f7ad4abfa909a310aecfd0 not found: ID does not exist" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.360937 4779 scope.go:117] "RemoveContainer" containerID="7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba" Sep 29 10:04:05 crc kubenswrapper[4779]: E0929 10:04:05.361249 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba\": container with ID starting with 7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba not found: ID does not exist" containerID="7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.361277 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba"} err="failed to get container status \"7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba\": rpc error: code = NotFound desc = could not find container \"7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba\": container with ID starting with 7ae7e0d93e0ce4f7b69a7b35b68b067e8ae79be3ef8486e694fe80e316121bba not found: ID does not exist" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.361291 4779 scope.go:117] "RemoveContainer" containerID="8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4" Sep 29 10:04:05 crc kubenswrapper[4779]: E0929 10:04:05.361870 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4\": container with ID starting with 8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4 not found: ID does not exist" containerID="8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4" Sep 29 10:04:05 crc kubenswrapper[4779]: I0929 10:04:05.361896 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4"} err="failed to get container status \"8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4\": rpc error: code = NotFound desc = could not find container \"8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4\": container with ID starting with 8895fb1e083f6f4d11e6916898f51a6f890a3dc876dd02c569ce20ffd0c9a6f4 not found: ID does not exist" Sep 29 10:04:06 crc kubenswrapper[4779]: I0929 10:04:06.745060 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" path="/var/lib/kubelet/pods/9c124885-d64e-4e93-ba92-0f409e1a9c51/volumes" Sep 29 10:04:08 crc kubenswrapper[4779]: I0929 10:04:08.865356 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:04:08 crc kubenswrapper[4779]: I0929 10:04:08.865819 4779 kubelet.go:2542] "SyncLoop (probe)" 
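The "ContainerStatus from runtime service failed ... code = NotFound" errors above are benign races: the kubelet re-queries CRI-O for containers it has just removed. A client that wants to treat that case as "already gone" typically inspects the gRPC status code; a minimal sketch using grpc-go's status helpers (the error value here is fabricated for illustration, not taken from the CRI client code):

package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isNotFound reports whether err carries the gRPC NotFound code, the
// check a CRI client can use to treat "container already gone" as benign.
func isNotFound(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	// Stand-in for the runtime's response; the real kubelet would get this
	// from the CRI ContainerStatus RPC against CRI-O.
	err := status.Error(codes.NotFound, "could not find container")
	fmt.Println(isNotFound(err))                         // true
	fmt.Println(isNotFound(errors.New("other failure"))) // false
}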
probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:04:08 crc kubenswrapper[4779]: I0929 10:04:08.911192 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:04:09 crc kubenswrapper[4779]: I0929 10:04:09.356792 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:04:09 crc kubenswrapper[4779]: I0929 10:04:09.734707 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pmfgn"] Sep 29 10:04:11 crc kubenswrapper[4779]: I0929 10:04:11.322113 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pmfgn" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" containerName="registry-server" containerID="cri-o://1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992" gracePeriod=2 Sep 29 10:04:11 crc kubenswrapper[4779]: I0929 10:04:11.778121 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:04:11 crc kubenswrapper[4779]: I0929 10:04:11.955997 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0614dfd-131c-44e7-a374-63aaa909b326-catalog-content\") pod \"c0614dfd-131c-44e7-a374-63aaa909b326\" (UID: \"c0614dfd-131c-44e7-a374-63aaa909b326\") " Sep 29 10:04:11 crc kubenswrapper[4779]: I0929 10:04:11.956095 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0614dfd-131c-44e7-a374-63aaa909b326-utilities\") pod \"c0614dfd-131c-44e7-a374-63aaa909b326\" (UID: \"c0614dfd-131c-44e7-a374-63aaa909b326\") " Sep 29 10:04:11 crc kubenswrapper[4779]: I0929 10:04:11.956243 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n957\" (UniqueName: \"kubernetes.io/projected/c0614dfd-131c-44e7-a374-63aaa909b326-kube-api-access-4n957\") pod \"c0614dfd-131c-44e7-a374-63aaa909b326\" (UID: \"c0614dfd-131c-44e7-a374-63aaa909b326\") " Sep 29 10:04:11 crc kubenswrapper[4779]: I0929 10:04:11.957893 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0614dfd-131c-44e7-a374-63aaa909b326-utilities" (OuterVolumeSpecName: "utilities") pod "c0614dfd-131c-44e7-a374-63aaa909b326" (UID: "c0614dfd-131c-44e7-a374-63aaa909b326"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:04:11 crc kubenswrapper[4779]: I0929 10:04:11.963783 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0614dfd-131c-44e7-a374-63aaa909b326-kube-api-access-4n957" (OuterVolumeSpecName: "kube-api-access-4n957") pod "c0614dfd-131c-44e7-a374-63aaa909b326" (UID: "c0614dfd-131c-44e7-a374-63aaa909b326"). InnerVolumeSpecName "kube-api-access-4n957". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.058467 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0614dfd-131c-44e7-a374-63aaa909b326-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.058738 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n957\" (UniqueName: \"kubernetes.io/projected/c0614dfd-131c-44e7-a374-63aaa909b326-kube-api-access-4n957\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.058599 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0614dfd-131c-44e7-a374-63aaa909b326-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0614dfd-131c-44e7-a374-63aaa909b326" (UID: "c0614dfd-131c-44e7-a374-63aaa909b326"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.160942 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0614dfd-131c-44e7-a374-63aaa909b326-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.332721 4779 generic.go:334] "Generic (PLEG): container finished" podID="c0614dfd-131c-44e7-a374-63aaa909b326" containerID="1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992" exitCode=0 Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.332777 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pmfgn" event={"ID":"c0614dfd-131c-44e7-a374-63aaa909b326","Type":"ContainerDied","Data":"1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992"} Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.332814 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pmfgn" event={"ID":"c0614dfd-131c-44e7-a374-63aaa909b326","Type":"ContainerDied","Data":"cd0271cb99ae7269ef8ceb836a2467955734461119874784af2f9ac20512e79d"} Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.332840 4779 scope.go:117] "RemoveContainer" containerID="1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.332834 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pmfgn" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.367514 4779 scope.go:117] "RemoveContainer" containerID="d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.376264 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pmfgn"] Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.382804 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pmfgn"] Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.407671 4779 scope.go:117] "RemoveContainer" containerID="84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.427511 4779 scope.go:117] "RemoveContainer" containerID="1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992" Sep 29 10:04:12 crc kubenswrapper[4779]: E0929 10:04:12.428093 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992\": container with ID starting with 1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992 not found: ID does not exist" containerID="1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.428135 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992"} err="failed to get container status \"1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992\": rpc error: code = NotFound desc = could not find container \"1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992\": container with ID starting with 1cdd7d6d0a4082d6d5a887d01532c18cd4d3e43d86e3a09968a6852ec7994992 not found: ID does not exist" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.428162 4779 scope.go:117] "RemoveContainer" containerID="d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f" Sep 29 10:04:12 crc kubenswrapper[4779]: E0929 10:04:12.428588 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f\": container with ID starting with d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f not found: ID does not exist" containerID="d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.428624 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f"} err="failed to get container status \"d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f\": rpc error: code = NotFound desc = could not find container \"d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f\": container with ID starting with d490fd0c52ead5488d81cb3752e741c58c0aa4561c3e4d8a49475170e950478f not found: ID does not exist" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.428666 4779 scope.go:117] "RemoveContainer" containerID="84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad" Sep 29 10:04:12 crc kubenswrapper[4779]: E0929 10:04:12.429041 4779 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad\": container with ID starting with 84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad not found: ID does not exist" containerID="84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.429075 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad"} err="failed to get container status \"84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad\": rpc error: code = NotFound desc = could not find container \"84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad\": container with ID starting with 84928b50f80d22b6d33bf55d377f5da9c6665df9e1c8d6c7ad0d3d5641471dad not found: ID does not exist" Sep 29 10:04:12 crc kubenswrapper[4779]: I0929 10:04:12.727248 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" path="/var/lib/kubelet/pods/c0614dfd-131c-44e7-a374-63aaa909b326/volumes" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.491430 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kc7sd"] Sep 29 10:04:38 crc kubenswrapper[4779]: E0929 10:04:38.492368 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerName="extract-utilities" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.492386 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerName="extract-utilities" Sep 29 10:04:38 crc kubenswrapper[4779]: E0929 10:04:38.492397 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerName="registry-server" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.492405 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerName="registry-server" Sep 29 10:04:38 crc kubenswrapper[4779]: E0929 10:04:38.492421 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerName="extract-content" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.492428 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerName="extract-content" Sep 29 10:04:38 crc kubenswrapper[4779]: E0929 10:04:38.492444 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" containerName="extract-utilities" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.492454 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" containerName="extract-utilities" Sep 29 10:04:38 crc kubenswrapper[4779]: E0929 10:04:38.492484 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" containerName="registry-server" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.492491 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" containerName="registry-server" Sep 29 10:04:38 crc kubenswrapper[4779]: E0929 10:04:38.492509 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" containerName="extract-content" Sep 
29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.492516 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" containerName="extract-content" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.492717 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0614dfd-131c-44e7-a374-63aaa909b326" containerName="registry-server" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.492730 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c124885-d64e-4e93-ba92-0f409e1a9c51" containerName="registry-server" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.494057 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.505822 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kc7sd"] Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.508749 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn9sq\" (UniqueName: \"kubernetes.io/projected/c7595379-7da8-4421-9503-82d3059ebc98-kube-api-access-fn9sq\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.508813 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-utilities\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.508879 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-catalog-content\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.610369 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn9sq\" (UniqueName: \"kubernetes.io/projected/c7595379-7da8-4421-9503-82d3059ebc98-kube-api-access-fn9sq\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.610428 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-utilities\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.610484 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-catalog-content\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.611048 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-catalog-content\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.611057 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-utilities\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.636883 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn9sq\" (UniqueName: \"kubernetes.io/projected/c7595379-7da8-4421-9503-82d3059ebc98-kube-api-access-fn9sq\") pod \"certified-operators-kc7sd\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:38 crc kubenswrapper[4779]: I0929 10:04:38.825851 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:39 crc kubenswrapper[4779]: I0929 10:04:39.423815 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kc7sd"] Sep 29 10:04:39 crc kubenswrapper[4779]: I0929 10:04:39.596799 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kc7sd" event={"ID":"c7595379-7da8-4421-9503-82d3059ebc98","Type":"ContainerStarted","Data":"01c39224b7ec99c3f07aa830663fe3e7ad1fcdfb049807165c01b42ac89e1bde"} Sep 29 10:04:40 crc kubenswrapper[4779]: I0929 10:04:40.607937 4779 generic.go:334] "Generic (PLEG): container finished" podID="c7595379-7da8-4421-9503-82d3059ebc98" containerID="fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0" exitCode=0 Sep 29 10:04:40 crc kubenswrapper[4779]: I0929 10:04:40.608038 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kc7sd" event={"ID":"c7595379-7da8-4421-9503-82d3059ebc98","Type":"ContainerDied","Data":"fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0"} Sep 29 10:04:42 crc kubenswrapper[4779]: I0929 10:04:42.631026 4779 generic.go:334] "Generic (PLEG): container finished" podID="c7595379-7da8-4421-9503-82d3059ebc98" containerID="6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1" exitCode=0 Sep 29 10:04:42 crc kubenswrapper[4779]: I0929 10:04:42.631237 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kc7sd" event={"ID":"c7595379-7da8-4421-9503-82d3059ebc98","Type":"ContainerDied","Data":"6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1"} Sep 29 10:04:43 crc kubenswrapper[4779]: I0929 10:04:43.677272 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kc7sd" event={"ID":"c7595379-7da8-4421-9503-82d3059ebc98","Type":"ContainerStarted","Data":"1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a"} Sep 29 10:04:43 crc kubenswrapper[4779]: I0929 10:04:43.701490 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kc7sd" podStartSLOduration=3.223828854 podStartE2EDuration="5.701467216s" podCreationTimestamp="2025-09-29 10:04:38 +0000 UTC" firstStartedPulling="2025-09-29 10:04:40.610095388 +0000 UTC 
m=+2112.591419292" lastFinishedPulling="2025-09-29 10:04:43.08773375 +0000 UTC m=+2115.069057654" observedRunningTime="2025-09-29 10:04:43.695355438 +0000 UTC m=+2115.676679352" watchObservedRunningTime="2025-09-29 10:04:43.701467216 +0000 UTC m=+2115.682791120" Sep 29 10:04:48 crc kubenswrapper[4779]: I0929 10:04:48.826636 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:48 crc kubenswrapper[4779]: I0929 10:04:48.828496 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:48 crc kubenswrapper[4779]: I0929 10:04:48.881253 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:49 crc kubenswrapper[4779]: I0929 10:04:49.791282 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:49 crc kubenswrapper[4779]: I0929 10:04:49.843817 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kc7sd"] Sep 29 10:04:51 crc kubenswrapper[4779]: I0929 10:04:51.756713 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kc7sd" podUID="c7595379-7da8-4421-9503-82d3059ebc98" containerName="registry-server" containerID="cri-o://1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a" gracePeriod=2 Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.195604 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.368664 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn9sq\" (UniqueName: \"kubernetes.io/projected/c7595379-7da8-4421-9503-82d3059ebc98-kube-api-access-fn9sq\") pod \"c7595379-7da8-4421-9503-82d3059ebc98\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.368824 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-catalog-content\") pod \"c7595379-7da8-4421-9503-82d3059ebc98\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.369306 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-utilities\") pod \"c7595379-7da8-4421-9503-82d3059ebc98\" (UID: \"c7595379-7da8-4421-9503-82d3059ebc98\") " Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.370324 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-utilities" (OuterVolumeSpecName: "utilities") pod "c7595379-7da8-4421-9503-82d3059ebc98" (UID: "c7595379-7da8-4421-9503-82d3059ebc98"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.376782 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7595379-7da8-4421-9503-82d3059ebc98-kube-api-access-fn9sq" (OuterVolumeSpecName: "kube-api-access-fn9sq") pod "c7595379-7da8-4421-9503-82d3059ebc98" (UID: "c7595379-7da8-4421-9503-82d3059ebc98"). InnerVolumeSpecName "kube-api-access-fn9sq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.425311 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c7595379-7da8-4421-9503-82d3059ebc98" (UID: "c7595379-7da8-4421-9503-82d3059ebc98"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.471743 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.472087 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7595379-7da8-4421-9503-82d3059ebc98-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.472101 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn9sq\" (UniqueName: \"kubernetes.io/projected/c7595379-7da8-4421-9503-82d3059ebc98-kube-api-access-fn9sq\") on node \"crc\" DevicePath \"\"" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.773893 4779 generic.go:334] "Generic (PLEG): container finished" podID="c7595379-7da8-4421-9503-82d3059ebc98" containerID="1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a" exitCode=0 Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.773948 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kc7sd" event={"ID":"c7595379-7da8-4421-9503-82d3059ebc98","Type":"ContainerDied","Data":"1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a"} Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.773992 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kc7sd" event={"ID":"c7595379-7da8-4421-9503-82d3059ebc98","Type":"ContainerDied","Data":"01c39224b7ec99c3f07aa830663fe3e7ad1fcdfb049807165c01b42ac89e1bde"} Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.774010 4779 scope.go:117] "RemoveContainer" containerID="1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.774036 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kc7sd" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.810262 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kc7sd"] Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.818883 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kc7sd"] Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.821655 4779 scope.go:117] "RemoveContainer" containerID="6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.844190 4779 scope.go:117] "RemoveContainer" containerID="fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.888046 4779 scope.go:117] "RemoveContainer" containerID="1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a" Sep 29 10:04:52 crc kubenswrapper[4779]: E0929 10:04:52.888693 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a\": container with ID starting with 1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a not found: ID does not exist" containerID="1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.888742 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a"} err="failed to get container status \"1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a\": rpc error: code = NotFound desc = could not find container \"1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a\": container with ID starting with 1eaf14fd0c045dfd536217721d5e03f2a5faf74e1613a471afc465ef94e9ab5a not found: ID does not exist" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.888769 4779 scope.go:117] "RemoveContainer" containerID="6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1" Sep 29 10:04:52 crc kubenswrapper[4779]: E0929 10:04:52.889160 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1\": container with ID starting with 6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1 not found: ID does not exist" containerID="6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.889201 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1"} err="failed to get container status \"6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1\": rpc error: code = NotFound desc = could not find container \"6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1\": container with ID starting with 6e59fbf31d8a321468420e3c5a73f174f4ad5d51dc30362bc424c7c1edfd73f1 not found: ID does not exist" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.889215 4779 scope.go:117] "RemoveContainer" containerID="fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0" Sep 29 10:04:52 crc kubenswrapper[4779]: E0929 10:04:52.889479 4779 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0\": container with ID starting with fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0 not found: ID does not exist" containerID="fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0" Sep 29 10:04:52 crc kubenswrapper[4779]: I0929 10:04:52.889513 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0"} err="failed to get container status \"fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0\": rpc error: code = NotFound desc = could not find container \"fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0\": container with ID starting with fa298e0e0bf233a5d299c51b6824d51ff002b6b411853739694d9633612a55b0 not found: ID does not exist" Sep 29 10:04:54 crc kubenswrapper[4779]: I0929 10:04:54.726668 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7595379-7da8-4421-9503-82d3059ebc98" path="/var/lib/kubelet/pods/c7595379-7da8-4421-9503-82d3059ebc98/volumes" Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.645096 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.660461 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7vf2k"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.673100 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qt854"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.683218 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7vf2k"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.692134 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.698517 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqmkz"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.704655 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.710445 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.727163 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b27d7e-b6c0-4d17-9e24-2c4129089482" path="/var/lib/kubelet/pods/20b27d7e-b6c0-4d17-9e24-2c4129089482/volumes" Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.728054 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccb51e5d-bdcb-4381-b665-87023be5aa51" path="/var/lib/kubelet/pods/ccb51e5d-bdcb-4381-b665-87023be5aa51/volumes" Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.728721 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4c6aca3-1090-4ea5-b3ed-04c11ac7c455" path="/var/lib/kubelet/pods/d4c6aca3-1090-4ea5-b3ed-04c11ac7c455/volumes" Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.729331 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.729364 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.729377 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.734191 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nxx5t"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.739856 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nj5nz"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.746252 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ff8wp"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.752435 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.758445 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6s8z7"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.763976 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wmq5f"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.769585 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.792931 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lflhq"] Sep 29 10:05:00 crc kubenswrapper[4779]: I0929 10:05:00.798375 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5v4bz"] Sep 29 10:05:02 crc kubenswrapper[4779]: I0929 10:05:02.738533 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14b18262-5ab6-43d1-8477-04f85881e4d0" path="/var/lib/kubelet/pods/14b18262-5ab6-43d1-8477-04f85881e4d0/volumes" Sep 29 10:05:02 crc kubenswrapper[4779]: I0929 10:05:02.741373 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="172c15cd-62e0-4fe1-9132-cb7df2f53783" path="/var/lib/kubelet/pods/172c15cd-62e0-4fe1-9132-cb7df2f53783/volumes" Sep 29 10:05:02 crc kubenswrapper[4779]: I0929 10:05:02.742796 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2be23c9e-69cc-40af-8823-6ce976a94c94" path="/var/lib/kubelet/pods/2be23c9e-69cc-40af-8823-6ce976a94c94/volumes" Sep 29 10:05:02 crc kubenswrapper[4779]: I0929 10:05:02.744411 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f75becd-23f3-46d5-ae30-73c4518c571e" path="/var/lib/kubelet/pods/7f75becd-23f3-46d5-ae30-73c4518c571e/volumes" Sep 29 10:05:02 crc kubenswrapper[4779]: I0929 10:05:02.747077 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c99c3706-4911-4e0f-b822-23d0eeb2d2d9" path="/var/lib/kubelet/pods/c99c3706-4911-4e0f-b822-23d0eeb2d2d9/volumes" Sep 29 10:05:02 crc kubenswrapper[4779]: I0929 10:05:02.748481 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="cadc8662-a6be-4cd3-8042-9bb980421260" path="/var/lib/kubelet/pods/cadc8662-a6be-4cd3-8042-9bb980421260/volumes" Sep 29 10:05:02 crc kubenswrapper[4779]: I0929 10:05:02.749948 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd59bf76-22af-4461-b023-45c86757140d" path="/var/lib/kubelet/pods/fd59bf76-22af-4461-b023-45c86757140d/volumes" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.047895 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx"] Sep 29 10:05:13 crc kubenswrapper[4779]: E0929 10:05:13.049520 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7595379-7da8-4421-9503-82d3059ebc98" containerName="registry-server" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.049606 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7595379-7da8-4421-9503-82d3059ebc98" containerName="registry-server" Sep 29 10:05:13 crc kubenswrapper[4779]: E0929 10:05:13.049683 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7595379-7da8-4421-9503-82d3059ebc98" containerName="extract-utilities" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.049755 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7595379-7da8-4421-9503-82d3059ebc98" containerName="extract-utilities" Sep 29 10:05:13 crc kubenswrapper[4779]: E0929 10:05:13.049823 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7595379-7da8-4421-9503-82d3059ebc98" containerName="extract-content" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.049890 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7595379-7da8-4421-9503-82d3059ebc98" containerName="extract-content" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.050152 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7595379-7da8-4421-9503-82d3059ebc98" containerName="registry-server" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.050823 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.052965 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.053147 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.053318 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.053403 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.053530 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.074471 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx"] Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.168787 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.168884 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29spt\" (UniqueName: \"kubernetes.io/projected/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-kube-api-access-29spt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.168926 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.168960 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.169111 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.270946 4779 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.271078 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29spt\" (UniqueName: \"kubernetes.io/projected/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-kube-api-access-29spt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.271113 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.271157 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.271231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.279802 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.280785 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.283643 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.286436 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.291340 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29spt\" (UniqueName: \"kubernetes.io/projected/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-kube-api-access-29spt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.377376 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.925665 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx"] Sep 29 10:05:13 crc kubenswrapper[4779]: I0929 10:05:13.969561 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" event={"ID":"9d6cbe01-348f-4510-99a9-e5fa8799e2f7","Type":"ContainerStarted","Data":"20ce332b836e8fa50a1b80d7dfdb499c9153f090e2c436abf7322d4b68f0f7b2"} Sep 29 10:05:14 crc kubenswrapper[4779]: I0929 10:05:14.979723 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" event={"ID":"9d6cbe01-348f-4510-99a9-e5fa8799e2f7","Type":"ContainerStarted","Data":"36095bea6960fd501db97e775fc6b406ddc33167d1aebfce5fa846a83cabf7eb"} Sep 29 10:05:14 crc kubenswrapper[4779]: I0929 10:05:14.997620 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" podStartSLOduration=1.420377174 podStartE2EDuration="1.997599903s" podCreationTimestamp="2025-09-29 10:05:13 +0000 UTC" firstStartedPulling="2025-09-29 10:05:13.942471657 +0000 UTC m=+2145.923795561" lastFinishedPulling="2025-09-29 10:05:14.519694386 +0000 UTC m=+2146.501018290" observedRunningTime="2025-09-29 10:05:14.993011259 +0000 UTC m=+2146.974335183" watchObservedRunningTime="2025-09-29 10:05:14.997599903 +0000 UTC m=+2146.978923807" Sep 29 10:05:16 crc kubenswrapper[4779]: I0929 10:05:16.966388 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:05:16 crc kubenswrapper[4779]: I0929 10:05:16.966917 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:05:26 crc kubenswrapper[4779]: I0929 10:05:26.086660 4779 generic.go:334] "Generic (PLEG): container finished" podID="9d6cbe01-348f-4510-99a9-e5fa8799e2f7" containerID="36095bea6960fd501db97e775fc6b406ddc33167d1aebfce5fa846a83cabf7eb" exitCode=0 Sep 29 10:05:26 crc kubenswrapper[4779]: I0929 10:05:26.086770 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" event={"ID":"9d6cbe01-348f-4510-99a9-e5fa8799e2f7","Type":"ContainerDied","Data":"36095bea6960fd501db97e775fc6b406ddc33167d1aebfce5fa846a83cabf7eb"} Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.627041 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.668049 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ceph\") pod \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.668187 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-repo-setup-combined-ca-bundle\") pod \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.668403 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29spt\" (UniqueName: \"kubernetes.io/projected/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-kube-api-access-29spt\") pod \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.668481 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ssh-key\") pod \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.674784 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ceph" (OuterVolumeSpecName: "ceph") pod "9d6cbe01-348f-4510-99a9-e5fa8799e2f7" (UID: "9d6cbe01-348f-4510-99a9-e5fa8799e2f7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.675302 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "9d6cbe01-348f-4510-99a9-e5fa8799e2f7" (UID: "9d6cbe01-348f-4510-99a9-e5fa8799e2f7"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.678805 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-kube-api-access-29spt" (OuterVolumeSpecName: "kube-api-access-29spt") pod "9d6cbe01-348f-4510-99a9-e5fa8799e2f7" (UID: "9d6cbe01-348f-4510-99a9-e5fa8799e2f7"). InnerVolumeSpecName "kube-api-access-29spt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.702097 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9d6cbe01-348f-4510-99a9-e5fa8799e2f7" (UID: "9d6cbe01-348f-4510-99a9-e5fa8799e2f7"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.772010 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-inventory\") pod \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\" (UID: \"9d6cbe01-348f-4510-99a9-e5fa8799e2f7\") " Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.772748 4779 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.772772 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29spt\" (UniqueName: \"kubernetes.io/projected/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-kube-api-access-29spt\") on node \"crc\" DevicePath \"\"" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.772782 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.772791 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.793114 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-inventory" (OuterVolumeSpecName: "inventory") pod "9d6cbe01-348f-4510-99a9-e5fa8799e2f7" (UID: "9d6cbe01-348f-4510-99a9-e5fa8799e2f7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:05:27 crc kubenswrapper[4779]: I0929 10:05:27.874476 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6cbe01-348f-4510-99a9-e5fa8799e2f7-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.107330 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" event={"ID":"9d6cbe01-348f-4510-99a9-e5fa8799e2f7","Type":"ContainerDied","Data":"20ce332b836e8fa50a1b80d7dfdb499c9153f090e2c436abf7322d4b68f0f7b2"} Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.107696 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20ce332b836e8fa50a1b80d7dfdb499c9153f090e2c436abf7322d4b68f0f7b2" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.107386 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.217852 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl"] Sep 29 10:05:28 crc kubenswrapper[4779]: E0929 10:05:28.218683 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d6cbe01-348f-4510-99a9-e5fa8799e2f7" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.218702 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d6cbe01-348f-4510-99a9-e5fa8799e2f7" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.219172 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d6cbe01-348f-4510-99a9-e5fa8799e2f7" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.220115 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.222858 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.223656 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.223784 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.223816 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.224113 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.235241 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl"] Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.281720 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh629\" (UniqueName: \"kubernetes.io/projected/19b930c0-52e1-4476-a69e-289a0c246e21-kube-api-access-bh629\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.281766 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.281890 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.281988 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.282077 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.383340 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh629\" (UniqueName: \"kubernetes.io/projected/19b930c0-52e1-4476-a69e-289a0c246e21-kube-api-access-bh629\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.383385 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.383448 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.383481 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.383530 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.387520 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" 
Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.387562 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.387577 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.388141 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.400330 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh629\" (UniqueName: \"kubernetes.io/projected/19b930c0-52e1-4476-a69e-289a0c246e21-kube-api-access-bh629\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:28 crc kubenswrapper[4779]: I0929 10:05:28.544078 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:05:29 crc kubenswrapper[4779]: I0929 10:05:29.083295 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl"] Sep 29 10:05:29 crc kubenswrapper[4779]: W0929 10:05:29.085190 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19b930c0_52e1_4476_a69e_289a0c246e21.slice/crio-c9717c0934bf06ba6e5356aa86d6a32bc7349ec843e40057b9d2a90f7768563b WatchSource:0}: Error finding container c9717c0934bf06ba6e5356aa86d6a32bc7349ec843e40057b9d2a90f7768563b: Status 404 returned error can't find the container with id c9717c0934bf06ba6e5356aa86d6a32bc7349ec843e40057b9d2a90f7768563b Sep 29 10:05:29 crc kubenswrapper[4779]: I0929 10:05:29.117590 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" event={"ID":"19b930c0-52e1-4476-a69e-289a0c246e21","Type":"ContainerStarted","Data":"c9717c0934bf06ba6e5356aa86d6a32bc7349ec843e40057b9d2a90f7768563b"} Sep 29 10:05:29 crc kubenswrapper[4779]: I0929 10:05:29.789402 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:05:30 crc kubenswrapper[4779]: I0929 10:05:30.128747 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" event={"ID":"19b930c0-52e1-4476-a69e-289a0c246e21","Type":"ContainerStarted","Data":"843a739b0e7ccdb1208e4bc3f0183eadae98ee9a82cfaf80ba34ffc144613bac"} Sep 29 10:05:31 crc kubenswrapper[4779]: I0929 10:05:31.169543 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" podStartSLOduration=2.469964444 podStartE2EDuration="3.169513165s" podCreationTimestamp="2025-09-29 10:05:28 +0000 UTC" firstStartedPulling="2025-09-29 10:05:29.086962842 +0000 UTC m=+2161.068286736" lastFinishedPulling="2025-09-29 10:05:29.786511513 +0000 UTC m=+2161.767835457" observedRunningTime="2025-09-29 10:05:31.168276449 +0000 UTC m=+2163.149600393" watchObservedRunningTime="2025-09-29 10:05:31.169513165 +0000 UTC m=+2163.150837119" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.018161 4779 scope.go:117] "RemoveContainer" containerID="50e856a08f214e1a45d5aaaca645891abb1ce8917036edbe1548e1f41cdc7eb8" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.082714 4779 scope.go:117] "RemoveContainer" containerID="c7d76ce67495d2b1394c7e8c57f79933637ffa4cf59bd795f5711b1d4023617a" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.164254 4779 scope.go:117] "RemoveContainer" containerID="56a824f38aa1a52efd8ad9713f0fdd4c60d89348344bb5740bb1beb650642ccd" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.197127 4779 scope.go:117] "RemoveContainer" containerID="fb5c444fdeaa272950679204b6ef3e50e5899685ad0710f129afe88cac946f7d" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.267683 4779 scope.go:117] "RemoveContainer" containerID="e259da7b53f71a5e012ae25a260864a5614b1d8d26dc67744da478a709c30584" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.302590 4779 scope.go:117] "RemoveContainer" containerID="da5f46c2bee9022cc73ca1433a78e88db0f72656fa6ec556672cbbd3c18cd211" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.337602 4779 scope.go:117] "RemoveContainer" 
containerID="10d68b6fe4fd0865cbfabae4ae4692eddc6d448837c492b3c525bcfe291b9620" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.379891 4779 scope.go:117] "RemoveContainer" containerID="39580d875f8ba9d5e7015fdd0ed0b6e17ac80582283868151765aaec01589500" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.406376 4779 scope.go:117] "RemoveContainer" containerID="48673680e28603ce1e9c3a4396dcf505769fb313966f6269ed1ea23e0b4d11cb" Sep 29 10:05:40 crc kubenswrapper[4779]: I0929 10:05:40.460380 4779 scope.go:117] "RemoveContainer" containerID="e935d94c1390bd70e16904fec651fa490f2190987462f0903463e25abfd50756" Sep 29 10:05:46 crc kubenswrapper[4779]: I0929 10:05:46.965891 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:05:46 crc kubenswrapper[4779]: I0929 10:05:46.966467 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:06:16 crc kubenswrapper[4779]: I0929 10:06:16.966690 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:06:16 crc kubenswrapper[4779]: I0929 10:06:16.967488 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:06:16 crc kubenswrapper[4779]: I0929 10:06:16.967556 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 10:06:16 crc kubenswrapper[4779]: I0929 10:06:16.968634 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2ea7c57f54acd206248deed2c0656374ecfa2983e6d9220e44a227622fb5b4f8"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 10:06:16 crc kubenswrapper[4779]: I0929 10:06:16.968729 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://2ea7c57f54acd206248deed2c0656374ecfa2983e6d9220e44a227622fb5b4f8" gracePeriod=600 Sep 29 10:06:17 crc kubenswrapper[4779]: I0929 10:06:17.574791 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="2ea7c57f54acd206248deed2c0656374ecfa2983e6d9220e44a227622fb5b4f8" exitCode=0 Sep 29 10:06:17 crc kubenswrapper[4779]: I0929 10:06:17.574861 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"2ea7c57f54acd206248deed2c0656374ecfa2983e6d9220e44a227622fb5b4f8"} Sep 29 10:06:17 crc kubenswrapper[4779]: I0929 10:06:17.575436 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e"} Sep 29 10:06:17 crc kubenswrapper[4779]: I0929 10:06:17.575463 4779 scope.go:117] "RemoveContainer" containerID="e12405b2f177552bd75b0aa0910938d5b67c3d7c2041800b3176b0508ac82f97" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.481033 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d8fjl"] Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.483507 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.492951 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8fjl"] Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.609404 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-catalog-content\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.609533 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-utilities\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.609587 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q5bb\" (UniqueName: \"kubernetes.io/projected/ee57e929-5dcd-4d1e-8796-906e06c64817-kube-api-access-6q5bb\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.711832 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q5bb\" (UniqueName: \"kubernetes.io/projected/ee57e929-5dcd-4d1e-8796-906e06c64817-kube-api-access-6q5bb\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.712020 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-catalog-content\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.712109 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-utilities\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.712612 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-catalog-content\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.712729 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-utilities\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.738846 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q5bb\" (UniqueName: \"kubernetes.io/projected/ee57e929-5dcd-4d1e-8796-906e06c64817-kube-api-access-6q5bb\") pod \"redhat-marketplace-d8fjl\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:04 crc kubenswrapper[4779]: I0929 10:07:04.850813 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:05 crc kubenswrapper[4779]: I0929 10:07:05.300854 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8fjl"] Sep 29 10:07:05 crc kubenswrapper[4779]: W0929 10:07:05.313125 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee57e929_5dcd_4d1e_8796_906e06c64817.slice/crio-51abed060b573c5436f053bc3a25743e3337f651c9e3c8c05b55836ebd483611 WatchSource:0}: Error finding container 51abed060b573c5436f053bc3a25743e3337f651c9e3c8c05b55836ebd483611: Status 404 returned error can't find the container with id 51abed060b573c5436f053bc3a25743e3337f651c9e3c8c05b55836ebd483611 Sep 29 10:07:05 crc kubenswrapper[4779]: I0929 10:07:05.992436 4779 generic.go:334] "Generic (PLEG): container finished" podID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerID="d51ce2175e51fab877734fe7293b719bd1c73ac4a4aba25c610c0d0da00935f4" exitCode=0 Sep 29 10:07:05 crc kubenswrapper[4779]: I0929 10:07:05.992529 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8fjl" event={"ID":"ee57e929-5dcd-4d1e-8796-906e06c64817","Type":"ContainerDied","Data":"d51ce2175e51fab877734fe7293b719bd1c73ac4a4aba25c610c0d0da00935f4"} Sep 29 10:07:05 crc kubenswrapper[4779]: I0929 10:07:05.993648 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8fjl" event={"ID":"ee57e929-5dcd-4d1e-8796-906e06c64817","Type":"ContainerStarted","Data":"51abed060b573c5436f053bc3a25743e3337f651c9e3c8c05b55836ebd483611"} Sep 29 10:07:08 crc kubenswrapper[4779]: I0929 10:07:08.015841 4779 generic.go:334] "Generic (PLEG): container finished" podID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerID="8e7ef548ecd4c3b9444b89e5abdd670808c14aa246c4b4f66ab4686d461420ce" exitCode=0 Sep 29 10:07:08 crc kubenswrapper[4779]: I0929 10:07:08.015915 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-d8fjl" event={"ID":"ee57e929-5dcd-4d1e-8796-906e06c64817","Type":"ContainerDied","Data":"8e7ef548ecd4c3b9444b89e5abdd670808c14aa246c4b4f66ab4686d461420ce"} Sep 29 10:07:10 crc kubenswrapper[4779]: I0929 10:07:10.032293 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8fjl" event={"ID":"ee57e929-5dcd-4d1e-8796-906e06c64817","Type":"ContainerStarted","Data":"38c44f4a44e2cae94fd3544422a3816426f06a2672d008a8cfd5c14604e9ba8d"} Sep 29 10:07:10 crc kubenswrapper[4779]: I0929 10:07:10.051218 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d8fjl" podStartSLOduration=2.782103482 podStartE2EDuration="6.051200168s" podCreationTimestamp="2025-09-29 10:07:04 +0000 UTC" firstStartedPulling="2025-09-29 10:07:05.994396754 +0000 UTC m=+2257.975720658" lastFinishedPulling="2025-09-29 10:07:09.26349344 +0000 UTC m=+2261.244817344" observedRunningTime="2025-09-29 10:07:10.048465818 +0000 UTC m=+2262.029789722" watchObservedRunningTime="2025-09-29 10:07:10.051200168 +0000 UTC m=+2262.032524072" Sep 29 10:07:14 crc kubenswrapper[4779]: I0929 10:07:14.852504 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:14 crc kubenswrapper[4779]: I0929 10:07:14.853106 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:14 crc kubenswrapper[4779]: I0929 10:07:14.915340 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:15 crc kubenswrapper[4779]: I0929 10:07:15.138125 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:15 crc kubenswrapper[4779]: I0929 10:07:15.192668 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8fjl"] Sep 29 10:07:17 crc kubenswrapper[4779]: I0929 10:07:17.102463 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d8fjl" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerName="registry-server" containerID="cri-o://38c44f4a44e2cae94fd3544422a3816426f06a2672d008a8cfd5c14604e9ba8d" gracePeriod=2 Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.113037 4779 generic.go:334] "Generic (PLEG): container finished" podID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerID="38c44f4a44e2cae94fd3544422a3816426f06a2672d008a8cfd5c14604e9ba8d" exitCode=0 Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.113092 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8fjl" event={"ID":"ee57e929-5dcd-4d1e-8796-906e06c64817","Type":"ContainerDied","Data":"38c44f4a44e2cae94fd3544422a3816426f06a2672d008a8cfd5c14604e9ba8d"} Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.113553 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8fjl" event={"ID":"ee57e929-5dcd-4d1e-8796-906e06c64817","Type":"ContainerDied","Data":"51abed060b573c5436f053bc3a25743e3337f651c9e3c8c05b55836ebd483611"} Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.113569 4779 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="51abed060b573c5436f053bc3a25743e3337f651c9e3c8c05b55836ebd483611" Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.129279 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.257672 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q5bb\" (UniqueName: \"kubernetes.io/projected/ee57e929-5dcd-4d1e-8796-906e06c64817-kube-api-access-6q5bb\") pod \"ee57e929-5dcd-4d1e-8796-906e06c64817\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.257781 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-utilities\") pod \"ee57e929-5dcd-4d1e-8796-906e06c64817\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.257970 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-catalog-content\") pod \"ee57e929-5dcd-4d1e-8796-906e06c64817\" (UID: \"ee57e929-5dcd-4d1e-8796-906e06c64817\") " Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.258923 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-utilities" (OuterVolumeSpecName: "utilities") pod "ee57e929-5dcd-4d1e-8796-906e06c64817" (UID: "ee57e929-5dcd-4d1e-8796-906e06c64817"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.264743 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee57e929-5dcd-4d1e-8796-906e06c64817-kube-api-access-6q5bb" (OuterVolumeSpecName: "kube-api-access-6q5bb") pod "ee57e929-5dcd-4d1e-8796-906e06c64817" (UID: "ee57e929-5dcd-4d1e-8796-906e06c64817"). InnerVolumeSpecName "kube-api-access-6q5bb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.280929 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee57e929-5dcd-4d1e-8796-906e06c64817" (UID: "ee57e929-5dcd-4d1e-8796-906e06c64817"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.360228 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.360259 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee57e929-5dcd-4d1e-8796-906e06c64817-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:18 crc kubenswrapper[4779]: I0929 10:07:18.360271 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q5bb\" (UniqueName: \"kubernetes.io/projected/ee57e929-5dcd-4d1e-8796-906e06c64817-kube-api-access-6q5bb\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:19 crc kubenswrapper[4779]: I0929 10:07:19.122424 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d8fjl" Sep 29 10:07:19 crc kubenswrapper[4779]: I0929 10:07:19.152575 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8fjl"] Sep 29 10:07:19 crc kubenswrapper[4779]: I0929 10:07:19.161784 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8fjl"] Sep 29 10:07:20 crc kubenswrapper[4779]: I0929 10:07:20.724382 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" path="/var/lib/kubelet/pods/ee57e929-5dcd-4d1e-8796-906e06c64817/volumes" Sep 29 10:07:23 crc kubenswrapper[4779]: I0929 10:07:23.154797 4779 generic.go:334] "Generic (PLEG): container finished" podID="19b930c0-52e1-4476-a69e-289a0c246e21" containerID="843a739b0e7ccdb1208e4bc3f0183eadae98ee9a82cfaf80ba34ffc144613bac" exitCode=0 Sep 29 10:07:23 crc kubenswrapper[4779]: I0929 10:07:23.154882 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" event={"ID":"19b930c0-52e1-4476-a69e-289a0c246e21","Type":"ContainerDied","Data":"843a739b0e7ccdb1208e4bc3f0183eadae98ee9a82cfaf80ba34ffc144613bac"} Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.636255 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.678108 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ssh-key\") pod \"19b930c0-52e1-4476-a69e-289a0c246e21\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.678451 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-bootstrap-combined-ca-bundle\") pod \"19b930c0-52e1-4476-a69e-289a0c246e21\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.678526 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ceph\") pod \"19b930c0-52e1-4476-a69e-289a0c246e21\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.678587 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh629\" (UniqueName: \"kubernetes.io/projected/19b930c0-52e1-4476-a69e-289a0c246e21-kube-api-access-bh629\") pod \"19b930c0-52e1-4476-a69e-289a0c246e21\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.678653 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-inventory\") pod \"19b930c0-52e1-4476-a69e-289a0c246e21\" (UID: \"19b930c0-52e1-4476-a69e-289a0c246e21\") " Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.685836 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ceph" (OuterVolumeSpecName: "ceph") pod "19b930c0-52e1-4476-a69e-289a0c246e21" (UID: "19b930c0-52e1-4476-a69e-289a0c246e21"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.686495 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19b930c0-52e1-4476-a69e-289a0c246e21-kube-api-access-bh629" (OuterVolumeSpecName: "kube-api-access-bh629") pod "19b930c0-52e1-4476-a69e-289a0c246e21" (UID: "19b930c0-52e1-4476-a69e-289a0c246e21"). InnerVolumeSpecName "kube-api-access-bh629". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.700143 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "19b930c0-52e1-4476-a69e-289a0c246e21" (UID: "19b930c0-52e1-4476-a69e-289a0c246e21"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.709783 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "19b930c0-52e1-4476-a69e-289a0c246e21" (UID: "19b930c0-52e1-4476-a69e-289a0c246e21"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.711174 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-inventory" (OuterVolumeSpecName: "inventory") pod "19b930c0-52e1-4476-a69e-289a0c246e21" (UID: "19b930c0-52e1-4476-a69e-289a0c246e21"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.781322 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.781381 4779 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.781405 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.781415 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh629\" (UniqueName: \"kubernetes.io/projected/19b930c0-52e1-4476-a69e-289a0c246e21-kube-api-access-bh629\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:24 crc kubenswrapper[4779]: I0929 10:07:24.781428 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19b930c0-52e1-4476-a69e-289a0c246e21-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.176571 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" event={"ID":"19b930c0-52e1-4476-a69e-289a0c246e21","Type":"ContainerDied","Data":"c9717c0934bf06ba6e5356aa86d6a32bc7349ec843e40057b9d2a90f7768563b"} Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.176603 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.176618 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9717c0934bf06ba6e5356aa86d6a32bc7349ec843e40057b9d2a90f7768563b" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.265875 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4"] Sep 29 10:07:25 crc kubenswrapper[4779]: E0929 10:07:25.266350 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerName="extract-content" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.266376 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerName="extract-content" Sep 29 10:07:25 crc kubenswrapper[4779]: E0929 10:07:25.266405 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19b930c0-52e1-4476-a69e-289a0c246e21" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.266416 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="19b930c0-52e1-4476-a69e-289a0c246e21" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 10:07:25 crc kubenswrapper[4779]: E0929 10:07:25.266452 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerName="registry-server" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.266461 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerName="registry-server" Sep 29 10:07:25 crc kubenswrapper[4779]: E0929 10:07:25.266473 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerName="extract-utilities" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.266483 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerName="extract-utilities" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.266671 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="19b930c0-52e1-4476-a69e-289a0c246e21" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.266692 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee57e929-5dcd-4d1e-8796-906e06c64817" containerName="registry-server" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.273551 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.275376 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4"] Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.275437 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.276897 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.277293 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.278123 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.279316 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.290763 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.290839 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.291387 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmtsg\" (UniqueName: \"kubernetes.io/projected/d1ac3840-3da4-4a8a-bc05-99181c55d968-kube-api-access-dmtsg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.291447 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.392706 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmtsg\" (UniqueName: \"kubernetes.io/projected/d1ac3840-3da4-4a8a-bc05-99181c55d968-kube-api-access-dmtsg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.393170 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.393296 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.393334 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.397944 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.401491 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.401709 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.409018 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmtsg\" (UniqueName: \"kubernetes.io/projected/d1ac3840-3da4-4a8a-bc05-99181c55d968-kube-api-access-dmtsg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:25 crc kubenswrapper[4779]: I0929 10:07:25.591605 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:26 crc kubenswrapper[4779]: I0929 10:07:26.143048 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4"] Sep 29 10:07:26 crc kubenswrapper[4779]: I0929 10:07:26.184179 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" event={"ID":"d1ac3840-3da4-4a8a-bc05-99181c55d968","Type":"ContainerStarted","Data":"5b07af12fe7a2dcaa14e343e70d344c810fd88c3516051812236b1ed8ee296f7"} Sep 29 10:07:27 crc kubenswrapper[4779]: I0929 10:07:27.194170 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" event={"ID":"d1ac3840-3da4-4a8a-bc05-99181c55d968","Type":"ContainerStarted","Data":"87242d687917d52e418e3eea908943d958af87eb1e8f005ab7e38dffebbc1246"} Sep 29 10:07:27 crc kubenswrapper[4779]: I0929 10:07:27.210887 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" podStartSLOduration=1.783853799 podStartE2EDuration="2.210865414s" podCreationTimestamp="2025-09-29 10:07:25 +0000 UTC" firstStartedPulling="2025-09-29 10:07:26.149885058 +0000 UTC m=+2278.131208962" lastFinishedPulling="2025-09-29 10:07:26.576896673 +0000 UTC m=+2278.558220577" observedRunningTime="2025-09-29 10:07:27.209992029 +0000 UTC m=+2279.191315953" watchObservedRunningTime="2025-09-29 10:07:27.210865414 +0000 UTC m=+2279.192189318" Sep 29 10:07:52 crc kubenswrapper[4779]: I0929 10:07:52.447020 4779 generic.go:334] "Generic (PLEG): container finished" podID="d1ac3840-3da4-4a8a-bc05-99181c55d968" containerID="87242d687917d52e418e3eea908943d958af87eb1e8f005ab7e38dffebbc1246" exitCode=0 Sep 29 10:07:52 crc kubenswrapper[4779]: I0929 10:07:52.447108 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" event={"ID":"d1ac3840-3da4-4a8a-bc05-99181c55d968","Type":"ContainerDied","Data":"87242d687917d52e418e3eea908943d958af87eb1e8f005ab7e38dffebbc1246"} Sep 29 10:07:53 crc kubenswrapper[4779]: I0929 10:07:53.903504 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.031444 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ssh-key\") pod \"d1ac3840-3da4-4a8a-bc05-99181c55d968\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.031501 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-inventory\") pod \"d1ac3840-3da4-4a8a-bc05-99181c55d968\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.031623 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmtsg\" (UniqueName: \"kubernetes.io/projected/d1ac3840-3da4-4a8a-bc05-99181c55d968-kube-api-access-dmtsg\") pod \"d1ac3840-3da4-4a8a-bc05-99181c55d968\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.031649 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ceph\") pod \"d1ac3840-3da4-4a8a-bc05-99181c55d968\" (UID: \"d1ac3840-3da4-4a8a-bc05-99181c55d968\") " Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.040526 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ceph" (OuterVolumeSpecName: "ceph") pod "d1ac3840-3da4-4a8a-bc05-99181c55d968" (UID: "d1ac3840-3da4-4a8a-bc05-99181c55d968"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.041195 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1ac3840-3da4-4a8a-bc05-99181c55d968-kube-api-access-dmtsg" (OuterVolumeSpecName: "kube-api-access-dmtsg") pod "d1ac3840-3da4-4a8a-bc05-99181c55d968" (UID: "d1ac3840-3da4-4a8a-bc05-99181c55d968"). InnerVolumeSpecName "kube-api-access-dmtsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.059233 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d1ac3840-3da4-4a8a-bc05-99181c55d968" (UID: "d1ac3840-3da4-4a8a-bc05-99181c55d968"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.061981 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-inventory" (OuterVolumeSpecName: "inventory") pod "d1ac3840-3da4-4a8a-bc05-99181c55d968" (UID: "d1ac3840-3da4-4a8a-bc05-99181c55d968"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.133838 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmtsg\" (UniqueName: \"kubernetes.io/projected/d1ac3840-3da4-4a8a-bc05-99181c55d968-kube-api-access-dmtsg\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.133879 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.133889 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.133897 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d1ac3840-3da4-4a8a-bc05-99181c55d968-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.469022 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" event={"ID":"d1ac3840-3da4-4a8a-bc05-99181c55d968","Type":"ContainerDied","Data":"5b07af12fe7a2dcaa14e343e70d344c810fd88c3516051812236b1ed8ee296f7"} Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.469065 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b07af12fe7a2dcaa14e343e70d344c810fd88c3516051812236b1ed8ee296f7" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.469117 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.557188 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f"] Sep 29 10:07:54 crc kubenswrapper[4779]: E0929 10:07:54.557571 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1ac3840-3da4-4a8a-bc05-99181c55d968" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.557589 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1ac3840-3da4-4a8a-bc05-99181c55d968" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.557750 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1ac3840-3da4-4a8a-bc05-99181c55d968" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.558606 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.560831 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.561242 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.561541 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.561777 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.562015 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.568436 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f"] Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.643282 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.643433 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.643467 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q46gl\" (UniqueName: \"kubernetes.io/projected/c5a92885-6922-471b-b66a-76e33b9e63a2-kube-api-access-q46gl\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.643501 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.745926 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.746176 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.746229 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q46gl\" (UniqueName: \"kubernetes.io/projected/c5a92885-6922-471b-b66a-76e33b9e63a2-kube-api-access-q46gl\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.746300 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.752949 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.754153 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.755530 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.771587 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q46gl\" (UniqueName: \"kubernetes.io/projected/c5a92885-6922-471b-b66a-76e33b9e63a2-kube-api-access-q46gl\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-czz9f\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:54 crc kubenswrapper[4779]: I0929 10:07:54.885840 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:07:55 crc kubenswrapper[4779]: I0929 10:07:55.507728 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f"] Sep 29 10:07:56 crc kubenswrapper[4779]: I0929 10:07:56.494465 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" event={"ID":"c5a92885-6922-471b-b66a-76e33b9e63a2","Type":"ContainerStarted","Data":"dcdc80c0a8139f989033ca31f0dc5e1acbcd416db8e9a51fb679b5441986597c"} Sep 29 10:07:56 crc kubenswrapper[4779]: I0929 10:07:56.494843 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" event={"ID":"c5a92885-6922-471b-b66a-76e33b9e63a2","Type":"ContainerStarted","Data":"50219fc639de09041dddafdf06f079c12326a2deb7962eb358ac54ec8659e774"} Sep 29 10:07:56 crc kubenswrapper[4779]: I0929 10:07:56.515478 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" podStartSLOduration=1.925390827 podStartE2EDuration="2.515410078s" podCreationTimestamp="2025-09-29 10:07:54 +0000 UTC" firstStartedPulling="2025-09-29 10:07:55.504321205 +0000 UTC m=+2307.485645109" lastFinishedPulling="2025-09-29 10:07:56.094340426 +0000 UTC m=+2308.075664360" observedRunningTime="2025-09-29 10:07:56.512474572 +0000 UTC m=+2308.493798496" watchObservedRunningTime="2025-09-29 10:07:56.515410078 +0000 UTC m=+2308.496733982" Sep 29 10:08:02 crc kubenswrapper[4779]: I0929 10:08:02.559256 4779 generic.go:334] "Generic (PLEG): container finished" podID="c5a92885-6922-471b-b66a-76e33b9e63a2" containerID="dcdc80c0a8139f989033ca31f0dc5e1acbcd416db8e9a51fb679b5441986597c" exitCode=0 Sep 29 10:08:02 crc kubenswrapper[4779]: I0929 10:08:02.559367 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" event={"ID":"c5a92885-6922-471b-b66a-76e33b9e63a2","Type":"ContainerDied","Data":"dcdc80c0a8139f989033ca31f0dc5e1acbcd416db8e9a51fb679b5441986597c"} Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.090788 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.146391 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-inventory\") pod \"c5a92885-6922-471b-b66a-76e33b9e63a2\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.146602 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q46gl\" (UniqueName: \"kubernetes.io/projected/c5a92885-6922-471b-b66a-76e33b9e63a2-kube-api-access-q46gl\") pod \"c5a92885-6922-471b-b66a-76e33b9e63a2\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.146656 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ceph\") pod \"c5a92885-6922-471b-b66a-76e33b9e63a2\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.147651 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ssh-key\") pod \"c5a92885-6922-471b-b66a-76e33b9e63a2\" (UID: \"c5a92885-6922-471b-b66a-76e33b9e63a2\") " Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.154295 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ceph" (OuterVolumeSpecName: "ceph") pod "c5a92885-6922-471b-b66a-76e33b9e63a2" (UID: "c5a92885-6922-471b-b66a-76e33b9e63a2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.154503 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5a92885-6922-471b-b66a-76e33b9e63a2-kube-api-access-q46gl" (OuterVolumeSpecName: "kube-api-access-q46gl") pod "c5a92885-6922-471b-b66a-76e33b9e63a2" (UID: "c5a92885-6922-471b-b66a-76e33b9e63a2"). InnerVolumeSpecName "kube-api-access-q46gl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.182120 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c5a92885-6922-471b-b66a-76e33b9e63a2" (UID: "c5a92885-6922-471b-b66a-76e33b9e63a2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.200815 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-inventory" (OuterVolumeSpecName: "inventory") pod "c5a92885-6922-471b-b66a-76e33b9e63a2" (UID: "c5a92885-6922-471b-b66a-76e33b9e63a2"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.250964 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q46gl\" (UniqueName: \"kubernetes.io/projected/c5a92885-6922-471b-b66a-76e33b9e63a2-kube-api-access-q46gl\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.251016 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.251027 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.251038 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5a92885-6922-471b-b66a-76e33b9e63a2-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.586517 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" event={"ID":"c5a92885-6922-471b-b66a-76e33b9e63a2","Type":"ContainerDied","Data":"50219fc639de09041dddafdf06f079c12326a2deb7962eb358ac54ec8659e774"} Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.586967 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50219fc639de09041dddafdf06f079c12326a2deb7962eb358ac54ec8659e774" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.587226 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-czz9f" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.687837 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82"] Sep 29 10:08:04 crc kubenswrapper[4779]: E0929 10:08:04.688341 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a92885-6922-471b-b66a-76e33b9e63a2" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.688370 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a92885-6922-471b-b66a-76e33b9e63a2" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.688695 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a92885-6922-471b-b66a-76e33b9e63a2" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.690360 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.693402 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.693509 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.693936 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.694325 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.696562 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.700651 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82"] Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.761658 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.761748 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw5z8\" (UniqueName: \"kubernetes.io/projected/fe694ea6-6566-4145-8470-70caa70638d5-kube-api-access-gw5z8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.761997 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.762271 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.864604 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.864708 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw5z8\" (UniqueName: 
\"kubernetes.io/projected/fe694ea6-6566-4145-8470-70caa70638d5-kube-api-access-gw5z8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.864803 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.865070 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.869833 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.870053 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.871345 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:04 crc kubenswrapper[4779]: I0929 10:08:04.881951 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw5z8\" (UniqueName: \"kubernetes.io/projected/fe694ea6-6566-4145-8470-70caa70638d5-kube-api-access-gw5z8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t5q82\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:05 crc kubenswrapper[4779]: I0929 10:08:05.010174 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:05 crc kubenswrapper[4779]: I0929 10:08:05.557772 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82"] Sep 29 10:08:05 crc kubenswrapper[4779]: I0929 10:08:05.595017 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" event={"ID":"fe694ea6-6566-4145-8470-70caa70638d5","Type":"ContainerStarted","Data":"c575074b812c4bacc1a98b18503695cd088aafedd37db857bf605456889d98f8"} Sep 29 10:08:06 crc kubenswrapper[4779]: I0929 10:08:06.609084 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" event={"ID":"fe694ea6-6566-4145-8470-70caa70638d5","Type":"ContainerStarted","Data":"9e7c55e80c595e0172874289bbbd71da0f452f2d84b32307c3dbcdf0b92b88fc"} Sep 29 10:08:43 crc kubenswrapper[4779]: I0929 10:08:43.948949 4779 generic.go:334] "Generic (PLEG): container finished" podID="fe694ea6-6566-4145-8470-70caa70638d5" containerID="9e7c55e80c595e0172874289bbbd71da0f452f2d84b32307c3dbcdf0b92b88fc" exitCode=0 Sep 29 10:08:43 crc kubenswrapper[4779]: I0929 10:08:43.949011 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" event={"ID":"fe694ea6-6566-4145-8470-70caa70638d5","Type":"ContainerDied","Data":"9e7c55e80c595e0172874289bbbd71da0f452f2d84b32307c3dbcdf0b92b88fc"} Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.367081 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.443260 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw5z8\" (UniqueName: \"kubernetes.io/projected/fe694ea6-6566-4145-8470-70caa70638d5-kube-api-access-gw5z8\") pod \"fe694ea6-6566-4145-8470-70caa70638d5\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.443427 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ceph\") pod \"fe694ea6-6566-4145-8470-70caa70638d5\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.443498 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ssh-key\") pod \"fe694ea6-6566-4145-8470-70caa70638d5\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.443550 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-inventory\") pod \"fe694ea6-6566-4145-8470-70caa70638d5\" (UID: \"fe694ea6-6566-4145-8470-70caa70638d5\") " Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.448671 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe694ea6-6566-4145-8470-70caa70638d5-kube-api-access-gw5z8" (OuterVolumeSpecName: "kube-api-access-gw5z8") pod "fe694ea6-6566-4145-8470-70caa70638d5" (UID: "fe694ea6-6566-4145-8470-70caa70638d5"). InnerVolumeSpecName "kube-api-access-gw5z8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.448667 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ceph" (OuterVolumeSpecName: "ceph") pod "fe694ea6-6566-4145-8470-70caa70638d5" (UID: "fe694ea6-6566-4145-8470-70caa70638d5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.473547 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fe694ea6-6566-4145-8470-70caa70638d5" (UID: "fe694ea6-6566-4145-8470-70caa70638d5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.476216 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-inventory" (OuterVolumeSpecName: "inventory") pod "fe694ea6-6566-4145-8470-70caa70638d5" (UID: "fe694ea6-6566-4145-8470-70caa70638d5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.546304 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.546349 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.546366 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw5z8\" (UniqueName: \"kubernetes.io/projected/fe694ea6-6566-4145-8470-70caa70638d5-kube-api-access-gw5z8\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.546380 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fe694ea6-6566-4145-8470-70caa70638d5-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.976371 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" event={"ID":"fe694ea6-6566-4145-8470-70caa70638d5","Type":"ContainerDied","Data":"c575074b812c4bacc1a98b18503695cd088aafedd37db857bf605456889d98f8"} Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.976432 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c575074b812c4bacc1a98b18503695cd088aafedd37db857bf605456889d98f8" Sep 29 10:08:45 crc kubenswrapper[4779]: I0929 10:08:45.976533 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t5q82" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.094230 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm"] Sep 29 10:08:46 crc kubenswrapper[4779]: E0929 10:08:46.094947 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe694ea6-6566-4145-8470-70caa70638d5" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.094965 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe694ea6-6566-4145-8470-70caa70638d5" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.095119 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe694ea6-6566-4145-8470-70caa70638d5" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.095857 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.098309 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.098336 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.098212 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.098870 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.100503 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.105549 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm"] Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.157697 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.157837 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwjv4\" (UniqueName: \"kubernetes.io/projected/d3187122-fb81-4acf-bcf4-5c45896ea5de-kube-api-access-bwjv4\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.158008 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " 
pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.158096 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.259426 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.259588 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.259661 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.259772 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwjv4\" (UniqueName: \"kubernetes.io/projected/d3187122-fb81-4acf-bcf4-5c45896ea5de-kube-api-access-bwjv4\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.265562 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.265709 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.274962 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.282428 
4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwjv4\" (UniqueName: \"kubernetes.io/projected/d3187122-fb81-4acf-bcf4-5c45896ea5de-kube-api-access-bwjv4\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.414515 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.940597 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm"] Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.965784 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.965830 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:08:46 crc kubenswrapper[4779]: I0929 10:08:46.985301 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" event={"ID":"d3187122-fb81-4acf-bcf4-5c45896ea5de","Type":"ContainerStarted","Data":"0772ecae036c5f321046bf569a92625d58604f9c81869191ca24f497c80738d9"} Sep 29 10:08:47 crc kubenswrapper[4779]: I0929 10:08:47.994715 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" event={"ID":"d3187122-fb81-4acf-bcf4-5c45896ea5de","Type":"ContainerStarted","Data":"e36b062fca64ef1db581cdd5cc76e76fe32355d832c20516754c173aa713bde4"} Sep 29 10:08:48 crc kubenswrapper[4779]: I0929 10:08:48.012743 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" podStartSLOduration=1.294962121 podStartE2EDuration="2.012724092s" podCreationTimestamp="2025-09-29 10:08:46 +0000 UTC" firstStartedPulling="2025-09-29 10:08:46.966849467 +0000 UTC m=+2358.948173371" lastFinishedPulling="2025-09-29 10:08:47.684611438 +0000 UTC m=+2359.665935342" observedRunningTime="2025-09-29 10:08:48.012284159 +0000 UTC m=+2359.993608063" watchObservedRunningTime="2025-09-29 10:08:48.012724092 +0000 UTC m=+2359.994047996" Sep 29 10:08:52 crc kubenswrapper[4779]: I0929 10:08:52.035042 4779 generic.go:334] "Generic (PLEG): container finished" podID="d3187122-fb81-4acf-bcf4-5c45896ea5de" containerID="e36b062fca64ef1db581cdd5cc76e76fe32355d832c20516754c173aa713bde4" exitCode=0 Sep 29 10:08:52 crc kubenswrapper[4779]: I0929 10:08:52.035141 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" event={"ID":"d3187122-fb81-4acf-bcf4-5c45896ea5de","Type":"ContainerDied","Data":"e36b062fca64ef1db581cdd5cc76e76fe32355d832c20516754c173aa713bde4"} Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.449128 4779 util.go:48] "No ready 
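
[Annotation, not part of the captured log] The patch_prober/prober pair above records a failed HTTP liveness probe: the kubelet issued GET http://127.0.0.1:8798/health for the machine-config-daemon container and got connection refused, which the prober reports as probeResult="failure". Functionally the probe is an HTTP GET that passes on any 2xx or 3xx status; a stand-in sketch (the URL is taken from the log line; this is not the kubelet's prober implementation):

    // probecheck.go - a stand-in for the HTTP liveness probe that failed
    // above; not the kubelet prober, just the equivalent GET-and-check.
    package main

    import (
        "fmt"
        "net/http"
        "os"
        "time"
    )

    func main() {
        client := &http.Client{Timeout: time.Second}            // probes use short timeouts
        resp, err := client.Get("http://127.0.0.1:8798/health") // URL from the log line
        if err != nil {
            fmt.Println("probe failure:", err) // e.g. "connect: connection refused"
            os.Exit(1)
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            fmt.Println("probe success:", resp.Status)
            return
        }
        fmt.Println("probe failure:", resp.Status)
        os.Exit(1)
    }
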
sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.506686 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ssh-key\") pod \"d3187122-fb81-4acf-bcf4-5c45896ea5de\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.506895 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwjv4\" (UniqueName: \"kubernetes.io/projected/d3187122-fb81-4acf-bcf4-5c45896ea5de-kube-api-access-bwjv4\") pod \"d3187122-fb81-4acf-bcf4-5c45896ea5de\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.506943 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ceph\") pod \"d3187122-fb81-4acf-bcf4-5c45896ea5de\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.506985 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-inventory\") pod \"d3187122-fb81-4acf-bcf4-5c45896ea5de\" (UID: \"d3187122-fb81-4acf-bcf4-5c45896ea5de\") " Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.512378 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3187122-fb81-4acf-bcf4-5c45896ea5de-kube-api-access-bwjv4" (OuterVolumeSpecName: "kube-api-access-bwjv4") pod "d3187122-fb81-4acf-bcf4-5c45896ea5de" (UID: "d3187122-fb81-4acf-bcf4-5c45896ea5de"). InnerVolumeSpecName "kube-api-access-bwjv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.512876 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ceph" (OuterVolumeSpecName: "ceph") pod "d3187122-fb81-4acf-bcf4-5c45896ea5de" (UID: "d3187122-fb81-4acf-bcf4-5c45896ea5de"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.542336 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-inventory" (OuterVolumeSpecName: "inventory") pod "d3187122-fb81-4acf-bcf4-5c45896ea5de" (UID: "d3187122-fb81-4acf-bcf4-5c45896ea5de"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.544039 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d3187122-fb81-4acf-bcf4-5c45896ea5de" (UID: "d3187122-fb81-4acf-bcf4-5c45896ea5de"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.609017 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.609062 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwjv4\" (UniqueName: \"kubernetes.io/projected/d3187122-fb81-4acf-bcf4-5c45896ea5de-kube-api-access-bwjv4\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.609078 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:53 crc kubenswrapper[4779]: I0929 10:08:53.609089 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d3187122-fb81-4acf-bcf4-5c45896ea5de-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.055092 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" event={"ID":"d3187122-fb81-4acf-bcf4-5c45896ea5de","Type":"ContainerDied","Data":"0772ecae036c5f321046bf569a92625d58604f9c81869191ca24f497c80738d9"} Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.055420 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0772ecae036c5f321046bf569a92625d58604f9c81869191ca24f497c80738d9" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.055187 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.135569 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d"] Sep 29 10:08:54 crc kubenswrapper[4779]: E0929 10:08:54.136061 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3187122-fb81-4acf-bcf4-5c45896ea5de" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.136089 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3187122-fb81-4acf-bcf4-5c45896ea5de" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.136310 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3187122-fb81-4acf-bcf4-5c45896ea5de" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.137208 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.139525 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.139640 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.140153 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.142527 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.142781 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.148801 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d"] Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.219359 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.219463 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.219495 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.219665 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptmz4\" (UniqueName: \"kubernetes.io/projected/f8ccea64-9a22-4841-93a1-8763ed086d3b-kube-api-access-ptmz4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.321254 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.321323 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.321382 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptmz4\" (UniqueName: \"kubernetes.io/projected/f8ccea64-9a22-4841-93a1-8763ed086d3b-kube-api-access-ptmz4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.321430 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.326353 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.326359 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.331717 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.350086 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptmz4\" (UniqueName: \"kubernetes.io/projected/f8ccea64-9a22-4841-93a1-8763ed086d3b-kube-api-access-ptmz4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:54 crc kubenswrapper[4779]: I0929 10:08:54.455675 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:08:55 crc kubenswrapper[4779]: I0929 10:08:55.004004 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d"] Sep 29 10:08:55 crc kubenswrapper[4779]: I0929 10:08:55.016432 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 10:08:55 crc kubenswrapper[4779]: I0929 10:08:55.065795 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" event={"ID":"f8ccea64-9a22-4841-93a1-8763ed086d3b","Type":"ContainerStarted","Data":"2cf65147191dfa93b03b05e21047fd7b94ae1849d03936f951a7bb07d13949d9"} Sep 29 10:08:56 crc kubenswrapper[4779]: I0929 10:08:56.078998 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" event={"ID":"f8ccea64-9a22-4841-93a1-8763ed086d3b","Type":"ContainerStarted","Data":"6f7534c584660738f97945568d8ae8ef6ba7113c66688e73d77044e7f495d558"} Sep 29 10:08:56 crc kubenswrapper[4779]: I0929 10:08:56.126577 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" podStartSLOduration=1.6933453410000001 podStartE2EDuration="2.126548056s" podCreationTimestamp="2025-09-29 10:08:54 +0000 UTC" firstStartedPulling="2025-09-29 10:08:55.016074928 +0000 UTC m=+2366.997398872" lastFinishedPulling="2025-09-29 10:08:55.449277683 +0000 UTC m=+2367.430601587" observedRunningTime="2025-09-29 10:08:56.110765356 +0000 UTC m=+2368.092089250" watchObservedRunningTime="2025-09-29 10:08:56.126548056 +0000 UTC m=+2368.107871970" Sep 29 10:09:16 crc kubenswrapper[4779]: I0929 10:09:16.966341 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:09:16 crc kubenswrapper[4779]: I0929 10:09:16.967036 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:09:41 crc kubenswrapper[4779]: I0929 10:09:41.507140 4779 generic.go:334] "Generic (PLEG): container finished" podID="f8ccea64-9a22-4841-93a1-8763ed086d3b" containerID="6f7534c584660738f97945568d8ae8ef6ba7113c66688e73d77044e7f495d558" exitCode=0 Sep 29 10:09:41 crc kubenswrapper[4779]: I0929 10:09:41.507241 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" event={"ID":"f8ccea64-9a22-4841-93a1-8763ed086d3b","Type":"ContainerDied","Data":"6f7534c584660738f97945568d8ae8ef6ba7113c66688e73d77044e7f495d558"} Sep 29 10:09:42 crc kubenswrapper[4779]: I0929 10:09:42.925998 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.101342 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-inventory\") pod \"f8ccea64-9a22-4841-93a1-8763ed086d3b\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.101586 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ceph\") pod \"f8ccea64-9a22-4841-93a1-8763ed086d3b\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.101700 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ssh-key\") pod \"f8ccea64-9a22-4841-93a1-8763ed086d3b\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.101767 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptmz4\" (UniqueName: \"kubernetes.io/projected/f8ccea64-9a22-4841-93a1-8763ed086d3b-kube-api-access-ptmz4\") pod \"f8ccea64-9a22-4841-93a1-8763ed086d3b\" (UID: \"f8ccea64-9a22-4841-93a1-8763ed086d3b\") " Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.108449 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ceph" (OuterVolumeSpecName: "ceph") pod "f8ccea64-9a22-4841-93a1-8763ed086d3b" (UID: "f8ccea64-9a22-4841-93a1-8763ed086d3b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.127166 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ccea64-9a22-4841-93a1-8763ed086d3b-kube-api-access-ptmz4" (OuterVolumeSpecName: "kube-api-access-ptmz4") pod "f8ccea64-9a22-4841-93a1-8763ed086d3b" (UID: "f8ccea64-9a22-4841-93a1-8763ed086d3b"). InnerVolumeSpecName "kube-api-access-ptmz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.144223 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f8ccea64-9a22-4841-93a1-8763ed086d3b" (UID: "f8ccea64-9a22-4841-93a1-8763ed086d3b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.200489 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-inventory" (OuterVolumeSpecName: "inventory") pod "f8ccea64-9a22-4841-93a1-8763ed086d3b" (UID: "f8ccea64-9a22-4841-93a1-8763ed086d3b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.204881 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.204927 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptmz4\" (UniqueName: \"kubernetes.io/projected/f8ccea64-9a22-4841-93a1-8763ed086d3b-kube-api-access-ptmz4\") on node \"crc\" DevicePath \"\"" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.204939 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.204948 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ccea64-9a22-4841-93a1-8763ed086d3b-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.545435 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" event={"ID":"f8ccea64-9a22-4841-93a1-8763ed086d3b","Type":"ContainerDied","Data":"2cf65147191dfa93b03b05e21047fd7b94ae1849d03936f951a7bb07d13949d9"} Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.545495 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cf65147191dfa93b03b05e21047fd7b94ae1849d03936f951a7bb07d13949d9" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.545613 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.638168 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qhfkw"] Sep 29 10:09:43 crc kubenswrapper[4779]: E0929 10:09:43.638820 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ccea64-9a22-4841-93a1-8763ed086d3b" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.638836 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ccea64-9a22-4841-93a1-8763ed086d3b" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.639082 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ccea64-9a22-4841-93a1-8763ed086d3b" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.640476 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.644558 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.645409 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.645573 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.645725 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.647972 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.671699 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qhfkw"] Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.728894 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ceph\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.728957 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.729058 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g79ql\" (UniqueName: \"kubernetes.io/projected/660bebed-55a7-40c0-96d7-244c49608f42-kube-api-access-g79ql\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.729114 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.832389 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.832597 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ceph\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" 
(UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.832621 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.832687 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g79ql\" (UniqueName: \"kubernetes.io/projected/660bebed-55a7-40c0-96d7-244c49608f42-kube-api-access-g79ql\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.838501 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ceph\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.839260 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.850457 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.851010 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g79ql\" (UniqueName: \"kubernetes.io/projected/660bebed-55a7-40c0-96d7-244c49608f42-kube-api-access-g79ql\") pod \"ssh-known-hosts-edpm-deployment-qhfkw\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:43 crc kubenswrapper[4779]: I0929 10:09:43.963964 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:44 crc kubenswrapper[4779]: I0929 10:09:44.508842 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qhfkw"] Sep 29 10:09:44 crc kubenswrapper[4779]: I0929 10:09:44.554787 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" event={"ID":"660bebed-55a7-40c0-96d7-244c49608f42","Type":"ContainerStarted","Data":"d360bd8608fe63642541704f58ea3db49e6bb678fc4675385f07b3c818de2b8a"} Sep 29 10:09:45 crc kubenswrapper[4779]: I0929 10:09:45.565058 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" event={"ID":"660bebed-55a7-40c0-96d7-244c49608f42","Type":"ContainerStarted","Data":"33f8c34e762110240a7354053be238bee51f3c78cf95d38dace06b18cc52cc79"} Sep 29 10:09:45 crc kubenswrapper[4779]: I0929 10:09:45.584172 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" podStartSLOduration=1.949200381 podStartE2EDuration="2.584152906s" podCreationTimestamp="2025-09-29 10:09:43 +0000 UTC" firstStartedPulling="2025-09-29 10:09:44.516179662 +0000 UTC m=+2416.497503566" lastFinishedPulling="2025-09-29 10:09:45.151132147 +0000 UTC m=+2417.132456091" observedRunningTime="2025-09-29 10:09:45.578418789 +0000 UTC m=+2417.559742703" watchObservedRunningTime="2025-09-29 10:09:45.584152906 +0000 UTC m=+2417.565476810" Sep 29 10:09:46 crc kubenswrapper[4779]: I0929 10:09:46.966379 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:09:46 crc kubenswrapper[4779]: I0929 10:09:46.966702 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:09:46 crc kubenswrapper[4779]: I0929 10:09:46.966747 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 10:09:46 crc kubenswrapper[4779]: I0929 10:09:46.967478 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 10:09:46 crc kubenswrapper[4779]: I0929 10:09:46.967535 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" gracePeriod=600 Sep 29 10:09:47 crc kubenswrapper[4779]: E0929 10:09:47.092312 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:09:47 crc kubenswrapper[4779]: I0929 10:09:47.584975 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" exitCode=0 Sep 29 10:09:47 crc kubenswrapper[4779]: I0929 10:09:47.585018 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e"} Sep 29 10:09:47 crc kubenswrapper[4779]: I0929 10:09:47.585051 4779 scope.go:117] "RemoveContainer" containerID="2ea7c57f54acd206248deed2c0656374ecfa2983e6d9220e44a227622fb5b4f8" Sep 29 10:09:47 crc kubenswrapper[4779]: I0929 10:09:47.585774 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:09:47 crc kubenswrapper[4779]: E0929 10:09:47.586124 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:09:55 crc kubenswrapper[4779]: I0929 10:09:55.665134 4779 generic.go:334] "Generic (PLEG): container finished" podID="660bebed-55a7-40c0-96d7-244c49608f42" containerID="33f8c34e762110240a7354053be238bee51f3c78cf95d38dace06b18cc52cc79" exitCode=0 Sep 29 10:09:55 crc kubenswrapper[4779]: I0929 10:09:55.665266 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" event={"ID":"660bebed-55a7-40c0-96d7-244c49608f42","Type":"ContainerDied","Data":"33f8c34e762110240a7354053be238bee51f3c78cf95d38dace06b18cc52cc79"} Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.190504 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.323342 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g79ql\" (UniqueName: \"kubernetes.io/projected/660bebed-55a7-40c0-96d7-244c49608f42-kube-api-access-g79ql\") pod \"660bebed-55a7-40c0-96d7-244c49608f42\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.323761 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ssh-key-openstack-edpm-ipam\") pod \"660bebed-55a7-40c0-96d7-244c49608f42\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.323792 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ceph\") pod \"660bebed-55a7-40c0-96d7-244c49608f42\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.323823 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-inventory-0\") pod \"660bebed-55a7-40c0-96d7-244c49608f42\" (UID: \"660bebed-55a7-40c0-96d7-244c49608f42\") " Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.329220 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/660bebed-55a7-40c0-96d7-244c49608f42-kube-api-access-g79ql" (OuterVolumeSpecName: "kube-api-access-g79ql") pod "660bebed-55a7-40c0-96d7-244c49608f42" (UID: "660bebed-55a7-40c0-96d7-244c49608f42"). InnerVolumeSpecName "kube-api-access-g79ql". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.329587 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ceph" (OuterVolumeSpecName: "ceph") pod "660bebed-55a7-40c0-96d7-244c49608f42" (UID: "660bebed-55a7-40c0-96d7-244c49608f42"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.354389 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "660bebed-55a7-40c0-96d7-244c49608f42" (UID: "660bebed-55a7-40c0-96d7-244c49608f42"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.355583 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "660bebed-55a7-40c0-96d7-244c49608f42" (UID: "660bebed-55a7-40c0-96d7-244c49608f42"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.430416 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g79ql\" (UniqueName: \"kubernetes.io/projected/660bebed-55a7-40c0-96d7-244c49608f42-kube-api-access-g79ql\") on node \"crc\" DevicePath \"\"" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.430449 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.430460 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.430470 4779 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/660bebed-55a7-40c0-96d7-244c49608f42-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.686029 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" event={"ID":"660bebed-55a7-40c0-96d7-244c49608f42","Type":"ContainerDied","Data":"d360bd8608fe63642541704f58ea3db49e6bb678fc4675385f07b3c818de2b8a"} Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.686086 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d360bd8608fe63642541704f58ea3db49e6bb678fc4675385f07b3c818de2b8a" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.686156 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qhfkw" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.769997 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj"] Sep 29 10:09:57 crc kubenswrapper[4779]: E0929 10:09:57.770508 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660bebed-55a7-40c0-96d7-244c49608f42" containerName="ssh-known-hosts-edpm-deployment" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.770535 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="660bebed-55a7-40c0-96d7-244c49608f42" containerName="ssh-known-hosts-edpm-deployment" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.770799 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="660bebed-55a7-40c0-96d7-244c49608f42" containerName="ssh-known-hosts-edpm-deployment" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.771705 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.780627 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.780817 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.780870 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.781649 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.781805 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.787504 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj"] Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.843941 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.844010 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.844063 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.844317 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwzgh\" (UniqueName: \"kubernetes.io/projected/af4a138d-7203-4903-84b8-76586f3d9969-kube-api-access-cwzgh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.945574 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwzgh\" (UniqueName: \"kubernetes.io/projected/af4a138d-7203-4903-84b8-76586f3d9969-kube-api-access-cwzgh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.945890 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.945961 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.945988 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.949747 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.951678 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.951696 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:57 crc kubenswrapper[4779]: I0929 10:09:57.962346 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwzgh\" (UniqueName: \"kubernetes.io/projected/af4a138d-7203-4903-84b8-76586f3d9969-kube-api-access-cwzgh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-tzhrj\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:58 crc kubenswrapper[4779]: I0929 10:09:58.093875 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:09:58 crc kubenswrapper[4779]: I0929 10:09:58.645489 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj"] Sep 29 10:09:58 crc kubenswrapper[4779]: W0929 10:09:58.645919 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf4a138d_7203_4903_84b8_76586f3d9969.slice/crio-0402c896088d5524000514895b1d10af017cba9f093f8bc87d4abdb792ff0a2a WatchSource:0}: Error finding container 0402c896088d5524000514895b1d10af017cba9f093f8bc87d4abdb792ff0a2a: Status 404 returned error can't find the container with id 0402c896088d5524000514895b1d10af017cba9f093f8bc87d4abdb792ff0a2a Sep 29 10:09:58 crc kubenswrapper[4779]: I0929 10:09:58.693541 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" event={"ID":"af4a138d-7203-4903-84b8-76586f3d9969","Type":"ContainerStarted","Data":"0402c896088d5524000514895b1d10af017cba9f093f8bc87d4abdb792ff0a2a"} Sep 29 10:10:00 crc kubenswrapper[4779]: I0929 10:10:00.738595 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" event={"ID":"af4a138d-7203-4903-84b8-76586f3d9969","Type":"ContainerStarted","Data":"e2d1c90f46ed2ff974f3217937b51a031b5a511d35e04052a998599c2e1d5127"} Sep 29 10:10:00 crc kubenswrapper[4779]: I0929 10:10:00.764687 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" podStartSLOduration=2.70092299 podStartE2EDuration="3.76467074s" podCreationTimestamp="2025-09-29 10:09:57 +0000 UTC" firstStartedPulling="2025-09-29 10:09:58.651116436 +0000 UTC m=+2430.632440340" lastFinishedPulling="2025-09-29 10:09:59.714864186 +0000 UTC m=+2431.696188090" observedRunningTime="2025-09-29 10:10:00.756886913 +0000 UTC m=+2432.738210837" watchObservedRunningTime="2025-09-29 10:10:00.76467074 +0000 UTC m=+2432.745994644" Sep 29 10:10:01 crc kubenswrapper[4779]: I0929 10:10:01.714890 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:10:01 crc kubenswrapper[4779]: E0929 10:10:01.715408 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:10:07 crc kubenswrapper[4779]: I0929 10:10:07.799554 4779 generic.go:334] "Generic (PLEG): container finished" podID="af4a138d-7203-4903-84b8-76586f3d9969" containerID="e2d1c90f46ed2ff974f3217937b51a031b5a511d35e04052a998599c2e1d5127" exitCode=0 Sep 29 10:10:07 crc kubenswrapper[4779]: I0929 10:10:07.800522 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" event={"ID":"af4a138d-7203-4903-84b8-76586f3d9969","Type":"ContainerDied","Data":"e2d1c90f46ed2ff974f3217937b51a031b5a511d35e04052a998599c2e1d5127"} Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.221347 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.373503 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwzgh\" (UniqueName: \"kubernetes.io/projected/af4a138d-7203-4903-84b8-76586f3d9969-kube-api-access-cwzgh\") pod \"af4a138d-7203-4903-84b8-76586f3d9969\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.374615 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-inventory\") pod \"af4a138d-7203-4903-84b8-76586f3d9969\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.374638 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ceph\") pod \"af4a138d-7203-4903-84b8-76586f3d9969\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.374757 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ssh-key\") pod \"af4a138d-7203-4903-84b8-76586f3d9969\" (UID: \"af4a138d-7203-4903-84b8-76586f3d9969\") " Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.379117 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af4a138d-7203-4903-84b8-76586f3d9969-kube-api-access-cwzgh" (OuterVolumeSpecName: "kube-api-access-cwzgh") pod "af4a138d-7203-4903-84b8-76586f3d9969" (UID: "af4a138d-7203-4903-84b8-76586f3d9969"). InnerVolumeSpecName "kube-api-access-cwzgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.379297 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ceph" (OuterVolumeSpecName: "ceph") pod "af4a138d-7203-4903-84b8-76586f3d9969" (UID: "af4a138d-7203-4903-84b8-76586f3d9969"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.399424 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-inventory" (OuterVolumeSpecName: "inventory") pod "af4a138d-7203-4903-84b8-76586f3d9969" (UID: "af4a138d-7203-4903-84b8-76586f3d9969"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.399530 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af4a138d-7203-4903-84b8-76586f3d9969" (UID: "af4a138d-7203-4903-84b8-76586f3d9969"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.476759 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.476790 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwzgh\" (UniqueName: \"kubernetes.io/projected/af4a138d-7203-4903-84b8-76586f3d9969-kube-api-access-cwzgh\") on node \"crc\" DevicePath \"\"" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.476805 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.476814 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af4a138d-7203-4903-84b8-76586f3d9969-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.819397 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" event={"ID":"af4a138d-7203-4903-84b8-76586f3d9969","Type":"ContainerDied","Data":"0402c896088d5524000514895b1d10af017cba9f093f8bc87d4abdb792ff0a2a"} Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.819438 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0402c896088d5524000514895b1d10af017cba9f093f8bc87d4abdb792ff0a2a" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.819470 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-tzhrj" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.907755 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7"] Sep 29 10:10:09 crc kubenswrapper[4779]: E0929 10:10:09.908327 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af4a138d-7203-4903-84b8-76586f3d9969" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.908363 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="af4a138d-7203-4903-84b8-76586f3d9969" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.908620 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="af4a138d-7203-4903-84b8-76586f3d9969" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.909481 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.912522 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.912655 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.912865 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.912952 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.913338 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:10:09 crc kubenswrapper[4779]: I0929 10:10:09.920452 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7"] Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.087303 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.087387 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.087476 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7svt\" (UniqueName: \"kubernetes.io/projected/e9108160-117f-465a-b392-20795018a59d-kube-api-access-r7svt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.087546 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.189486 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.189848 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.190062 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.190318 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7svt\" (UniqueName: \"kubernetes.io/projected/e9108160-117f-465a-b392-20795018a59d-kube-api-access-r7svt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.194888 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.195212 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.197307 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.206557 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7svt\" (UniqueName: \"kubernetes.io/projected/e9108160-117f-465a-b392-20795018a59d-kube-api-access-r7svt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.234600 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.564256 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7"] Sep 29 10:10:10 crc kubenswrapper[4779]: I0929 10:10:10.829787 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" event={"ID":"e9108160-117f-465a-b392-20795018a59d","Type":"ContainerStarted","Data":"9699e8e4747b890307816e3bb1064d0392e475200eddcc507ecf05f1472a2dee"} Sep 29 10:10:11 crc kubenswrapper[4779]: I0929 10:10:11.848251 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" event={"ID":"e9108160-117f-465a-b392-20795018a59d","Type":"ContainerStarted","Data":"e6f3f07472c07e35c8183396d3eb927fca2aeb0be9e5814d5e9083d471856c4a"} Sep 29 10:10:11 crc kubenswrapper[4779]: I0929 10:10:11.879763 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" podStartSLOduration=2.301380433 podStartE2EDuration="2.879743801s" podCreationTimestamp="2025-09-29 10:10:09 +0000 UTC" firstStartedPulling="2025-09-29 10:10:10.570777056 +0000 UTC m=+2442.552100960" lastFinishedPulling="2025-09-29 10:10:11.149140424 +0000 UTC m=+2443.130464328" observedRunningTime="2025-09-29 10:10:11.872229852 +0000 UTC m=+2443.853553766" watchObservedRunningTime="2025-09-29 10:10:11.879743801 +0000 UTC m=+2443.861067705" Sep 29 10:10:13 crc kubenswrapper[4779]: I0929 10:10:13.715471 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:10:13 crc kubenswrapper[4779]: E0929 10:10:13.716337 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:10:21 crc kubenswrapper[4779]: I0929 10:10:21.936609 4779 generic.go:334] "Generic (PLEG): container finished" podID="e9108160-117f-465a-b392-20795018a59d" containerID="e6f3f07472c07e35c8183396d3eb927fca2aeb0be9e5814d5e9083d471856c4a" exitCode=0 Sep 29 10:10:21 crc kubenswrapper[4779]: I0929 10:10:21.936697 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" event={"ID":"e9108160-117f-465a-b392-20795018a59d","Type":"ContainerDied","Data":"e6f3f07472c07e35c8183396d3eb927fca2aeb0be9e5814d5e9083d471856c4a"} Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.405828 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.585855 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-inventory\") pod \"e9108160-117f-465a-b392-20795018a59d\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.586187 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ssh-key\") pod \"e9108160-117f-465a-b392-20795018a59d\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.586266 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7svt\" (UniqueName: \"kubernetes.io/projected/e9108160-117f-465a-b392-20795018a59d-kube-api-access-r7svt\") pod \"e9108160-117f-465a-b392-20795018a59d\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.586363 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ceph\") pod \"e9108160-117f-465a-b392-20795018a59d\" (UID: \"e9108160-117f-465a-b392-20795018a59d\") " Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.593284 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9108160-117f-465a-b392-20795018a59d-kube-api-access-r7svt" (OuterVolumeSpecName: "kube-api-access-r7svt") pod "e9108160-117f-465a-b392-20795018a59d" (UID: "e9108160-117f-465a-b392-20795018a59d"). InnerVolumeSpecName "kube-api-access-r7svt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.595047 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ceph" (OuterVolumeSpecName: "ceph") pod "e9108160-117f-465a-b392-20795018a59d" (UID: "e9108160-117f-465a-b392-20795018a59d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.616719 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-inventory" (OuterVolumeSpecName: "inventory") pod "e9108160-117f-465a-b392-20795018a59d" (UID: "e9108160-117f-465a-b392-20795018a59d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.630196 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e9108160-117f-465a-b392-20795018a59d" (UID: "e9108160-117f-465a-b392-20795018a59d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.690352 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.690428 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7svt\" (UniqueName: \"kubernetes.io/projected/e9108160-117f-465a-b392-20795018a59d-kube-api-access-r7svt\") on node \"crc\" DevicePath \"\"" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.690451 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.690468 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9108160-117f-465a-b392-20795018a59d-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.961048 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" event={"ID":"e9108160-117f-465a-b392-20795018a59d","Type":"ContainerDied","Data":"9699e8e4747b890307816e3bb1064d0392e475200eddcc507ecf05f1472a2dee"} Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.961177 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9699e8e4747b890307816e3bb1064d0392e475200eddcc507ecf05f1472a2dee" Sep 29 10:10:23 crc kubenswrapper[4779]: I0929 10:10:23.961206 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.063134 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77"] Sep 29 10:10:24 crc kubenswrapper[4779]: E0929 10:10:24.063883 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9108160-117f-465a-b392-20795018a59d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.063969 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9108160-117f-465a-b392-20795018a59d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.064180 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9108160-117f-465a-b392-20795018a59d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.064888 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.067655 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.068576 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.068580 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.068610 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.068647 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.069496 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.069625 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.069749 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.069922 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.084805 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77"] Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097561 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h5hh\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-kube-api-access-4h5hh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097658 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097723 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097752 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097772 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097809 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097835 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097858 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097876 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.097894 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.098010 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.098038 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.098090 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.098112 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.098134 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199431 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199479 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199505 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: 
\"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199526 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199564 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199593 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199650 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199678 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199703 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199740 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h5hh\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-kube-api-access-4h5hh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199794 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199847 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199877 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199894 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.199950 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.205311 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.205344 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.205318 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.205763 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.207127 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.207337 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.207769 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.207894 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.208302 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.208511 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.209144 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.209669 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.210277 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.210327 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.220643 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h5hh\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-kube-api-access-4h5hh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-gsz77\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.388917 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.945628 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77"] Sep 29 10:10:24 crc kubenswrapper[4779]: I0929 10:10:24.973036 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" event={"ID":"02785720-b248-42e2-93a3-ccda8cdb2950","Type":"ContainerStarted","Data":"a0fc611394db9eab323fad0e54cff5365cd9dd1cc7c8a8306a001dd9e1b2ea63"} Sep 29 10:10:25 crc kubenswrapper[4779]: I0929 10:10:25.714818 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:10:25 crc kubenswrapper[4779]: E0929 10:10:25.715401 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:10:25 crc kubenswrapper[4779]: I0929 10:10:25.983666 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" event={"ID":"02785720-b248-42e2-93a3-ccda8cdb2950","Type":"ContainerStarted","Data":"4aba24f08e355e28aad23b84836bca2857ddaedea75a407cd10db92a5832b7db"} Sep 29 10:10:26 crc kubenswrapper[4779]: I0929 10:10:26.021873 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" podStartSLOduration=1.56157499 podStartE2EDuration="2.021843462s" podCreationTimestamp="2025-09-29 10:10:24 +0000 UTC" firstStartedPulling="2025-09-29 10:10:24.957291979 +0000 UTC m=+2456.938615883" lastFinishedPulling="2025-09-29 10:10:25.417560451 +0000 UTC m=+2457.398884355" observedRunningTime="2025-09-29 10:10:26.005362913 +0000 UTC m=+2457.986686817" watchObservedRunningTime="2025-09-29 10:10:26.021843462 +0000 UTC m=+2458.003167406" Sep 29 10:10:36 crc kubenswrapper[4779]: I0929 10:10:36.715195 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:10:36 crc kubenswrapper[4779]: E0929 10:10:36.716421 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:10:50 crc kubenswrapper[4779]: I0929 10:10:50.715027 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:10:50 crc kubenswrapper[4779]: E0929 10:10:50.715822 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:11:03 crc kubenswrapper[4779]: I0929 10:11:03.714381 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:11:03 crc kubenswrapper[4779]: E0929 10:11:03.715134 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:11:06 crc kubenswrapper[4779]: I0929 10:11:06.359897 4779 generic.go:334] "Generic (PLEG): container finished" podID="02785720-b248-42e2-93a3-ccda8cdb2950" containerID="4aba24f08e355e28aad23b84836bca2857ddaedea75a407cd10db92a5832b7db" exitCode=0 Sep 29 10:11:06 crc kubenswrapper[4779]: I0929 10:11:06.360288 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" event={"ID":"02785720-b248-42e2-93a3-ccda8cdb2950","Type":"ContainerDied","Data":"4aba24f08e355e28aad23b84836bca2857ddaedea75a407cd10db92a5832b7db"} Sep 29 10:11:07 crc kubenswrapper[4779]: I0929 10:11:07.923347 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086308 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-bootstrap-combined-ca-bundle\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086347 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-libvirt-combined-ca-bundle\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086390 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-ovn-default-certs-0\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086429 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-nova-combined-ca-bundle\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086500 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " 
Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086571 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-inventory\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086621 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086675 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-telemetry-combined-ca-bundle\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086706 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ceph\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086725 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-neutron-metadata-combined-ca-bundle\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086753 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h5hh\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-kube-api-access-4h5hh\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086809 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ovn-combined-ca-bundle\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086826 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-repo-setup-combined-ca-bundle\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086843 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ssh-key\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.086865 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"02785720-b248-42e2-93a3-ccda8cdb2950\" (UID: \"02785720-b248-42e2-93a3-ccda8cdb2950\") " Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.094351 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.094497 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-kube-api-access-4h5hh" (OuterVolumeSpecName: "kube-api-access-4h5hh") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "kube-api-access-4h5hh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.095001 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.095072 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.095096 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.095429 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.095436 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). 
InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.095481 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.096228 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.096580 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.096935 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ceph" (OuterVolumeSpecName: "ceph") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.096985 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.097247 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.119537 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-inventory" (OuterVolumeSpecName: "inventory") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.132964 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "02785720-b248-42e2-93a3-ccda8cdb2950" (UID: "02785720-b248-42e2-93a3-ccda8cdb2950"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191254 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191289 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191303 4779 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191316 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191324 4779 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191333 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h5hh\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-kube-api-access-4h5hh\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191342 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191349 4779 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191357 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191365 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191375 4779 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191384 4779 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191393 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191402 4779 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02785720-b248-42e2-93a3-ccda8cdb2950-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.191412 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/02785720-b248-42e2-93a3-ccda8cdb2950-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.381181 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" event={"ID":"02785720-b248-42e2-93a3-ccda8cdb2950","Type":"ContainerDied","Data":"a0fc611394db9eab323fad0e54cff5365cd9dd1cc7c8a8306a001dd9e1b2ea63"} Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.381232 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0fc611394db9eab323fad0e54cff5365cd9dd1cc7c8a8306a001dd9e1b2ea63" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.381256 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-gsz77" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.487199 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4"] Sep 29 10:11:08 crc kubenswrapper[4779]: E0929 10:11:08.487683 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02785720-b248-42e2-93a3-ccda8cdb2950" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.487707 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="02785720-b248-42e2-93a3-ccda8cdb2950" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.487998 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="02785720-b248-42e2-93a3-ccda8cdb2950" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.488838 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.491083 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.491330 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.491510 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.491703 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.491769 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.496385 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.496487 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.496588 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.496664 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsj9b\" (UniqueName: \"kubernetes.io/projected/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-kube-api-access-jsj9b\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.498967 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4"] Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.597870 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.597971 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.598028 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.598055 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsj9b\" (UniqueName: \"kubernetes.io/projected/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-kube-api-access-jsj9b\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.602112 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.602284 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.610350 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.616741 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsj9b\" (UniqueName: \"kubernetes.io/projected/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-kube-api-access-jsj9b\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:08 crc kubenswrapper[4779]: I0929 10:11:08.825649 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:09 crc kubenswrapper[4779]: I0929 10:11:09.375223 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4"] Sep 29 10:11:09 crc kubenswrapper[4779]: I0929 10:11:09.393362 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" event={"ID":"b564f44b-9cb9-4ce1-894d-2c88056ae4ee","Type":"ContainerStarted","Data":"6ecaa8d841d535fda3f321faa3c833d88e35c15256857f862835cb36211653fe"} Sep 29 10:11:10 crc kubenswrapper[4779]: I0929 10:11:10.401714 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" event={"ID":"b564f44b-9cb9-4ce1-894d-2c88056ae4ee","Type":"ContainerStarted","Data":"75e405e24d2e7a187837cecb2469b307abc83d297a0cbbcca9104fc1c7c053b4"} Sep 29 10:11:10 crc kubenswrapper[4779]: I0929 10:11:10.421769 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" podStartSLOduration=1.975962097 podStartE2EDuration="2.421745677s" podCreationTimestamp="2025-09-29 10:11:08 +0000 UTC" firstStartedPulling="2025-09-29 10:11:09.379859435 +0000 UTC m=+2501.361183339" lastFinishedPulling="2025-09-29 10:11:09.825643015 +0000 UTC m=+2501.806966919" observedRunningTime="2025-09-29 10:11:10.416534426 +0000 UTC m=+2502.397858340" watchObservedRunningTime="2025-09-29 10:11:10.421745677 +0000 UTC m=+2502.403069581" Sep 29 10:11:14 crc kubenswrapper[4779]: I0929 10:11:14.714275 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:11:14 crc kubenswrapper[4779]: E0929 10:11:14.715060 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:11:15 crc kubenswrapper[4779]: I0929 10:11:15.447723 4779 generic.go:334] "Generic (PLEG): container finished" podID="b564f44b-9cb9-4ce1-894d-2c88056ae4ee" containerID="75e405e24d2e7a187837cecb2469b307abc83d297a0cbbcca9104fc1c7c053b4" exitCode=0 Sep 29 10:11:15 crc kubenswrapper[4779]: I0929 10:11:15.447826 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" event={"ID":"b564f44b-9cb9-4ce1-894d-2c88056ae4ee","Type":"ContainerDied","Data":"75e405e24d2e7a187837cecb2469b307abc83d297a0cbbcca9104fc1c7c053b4"} Sep 29 10:11:16 crc kubenswrapper[4779]: I0929 10:11:16.893650 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.071611 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ceph\") pod \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.071697 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-inventory\") pod \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.071852 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ssh-key\") pod \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.071924 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsj9b\" (UniqueName: \"kubernetes.io/projected/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-kube-api-access-jsj9b\") pod \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\" (UID: \"b564f44b-9cb9-4ce1-894d-2c88056ae4ee\") " Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.077535 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-kube-api-access-jsj9b" (OuterVolumeSpecName: "kube-api-access-jsj9b") pod "b564f44b-9cb9-4ce1-894d-2c88056ae4ee" (UID: "b564f44b-9cb9-4ce1-894d-2c88056ae4ee"). InnerVolumeSpecName "kube-api-access-jsj9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.078037 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ceph" (OuterVolumeSpecName: "ceph") pod "b564f44b-9cb9-4ce1-894d-2c88056ae4ee" (UID: "b564f44b-9cb9-4ce1-894d-2c88056ae4ee"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.106461 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b564f44b-9cb9-4ce1-894d-2c88056ae4ee" (UID: "b564f44b-9cb9-4ce1-894d-2c88056ae4ee"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.106993 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-inventory" (OuterVolumeSpecName: "inventory") pod "b564f44b-9cb9-4ce1-894d-2c88056ae4ee" (UID: "b564f44b-9cb9-4ce1-894d-2c88056ae4ee"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.175536 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.175587 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.175603 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.175617 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsj9b\" (UniqueName: \"kubernetes.io/projected/b564f44b-9cb9-4ce1-894d-2c88056ae4ee-kube-api-access-jsj9b\") on node \"crc\" DevicePath \"\"" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.469241 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" event={"ID":"b564f44b-9cb9-4ce1-894d-2c88056ae4ee","Type":"ContainerDied","Data":"6ecaa8d841d535fda3f321faa3c833d88e35c15256857f862835cb36211653fe"} Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.469286 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ecaa8d841d535fda3f321faa3c833d88e35c15256857f862835cb36211653fe" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.469343 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.577832 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq"] Sep 29 10:11:17 crc kubenswrapper[4779]: E0929 10:11:17.578390 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b564f44b-9cb9-4ce1-894d-2c88056ae4ee" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.578412 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b564f44b-9cb9-4ce1-894d-2c88056ae4ee" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.578600 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b564f44b-9cb9-4ce1-894d-2c88056ae4ee" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.579367 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583043 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np77s\" (UniqueName: \"kubernetes.io/projected/4069654b-84b4-4049-ad65-a2414376655c-kube-api-access-np77s\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583128 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583274 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583306 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583327 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583365 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4069654b-84b4-4049-ad65-a2414376655c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583393 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583519 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583614 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.583663 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.584528 4779 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.584578 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.606058 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq"] Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.684610 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.684660 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.684684 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.684716 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4069654b-84b4-4049-ad65-a2414376655c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.684748 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np77s\" (UniqueName: \"kubernetes.io/projected/4069654b-84b4-4049-ad65-a2414376655c-kube-api-access-np77s\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.684782 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.686021 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4069654b-84b4-4049-ad65-a2414376655c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.689171 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.689274 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.693533 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.694347 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.702804 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np77s\" (UniqueName: \"kubernetes.io/projected/4069654b-84b4-4049-ad65-a2414376655c-kube-api-access-np77s\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bxslq\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:17 crc kubenswrapper[4779]: I0929 10:11:17.907790 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:11:18 crc kubenswrapper[4779]: I0929 10:11:18.423391 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq"] Sep 29 10:11:18 crc kubenswrapper[4779]: I0929 10:11:18.478303 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" event={"ID":"4069654b-84b4-4049-ad65-a2414376655c","Type":"ContainerStarted","Data":"ad7891d6e0065a20c511c0516e9e81adef7db7e1ca4e73896e67e0ac3f7c5d30"} Sep 29 10:11:19 crc kubenswrapper[4779]: I0929 10:11:19.492130 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" event={"ID":"4069654b-84b4-4049-ad65-a2414376655c","Type":"ContainerStarted","Data":"47a128604c9f928af9c6608906ad6b5afe56650c2a92c016bd7aa28952b3d55c"} Sep 29 10:11:19 crc kubenswrapper[4779]: I0929 10:11:19.525083 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" podStartSLOduration=2.001432502 podStartE2EDuration="2.525069067s" podCreationTimestamp="2025-09-29 10:11:17 +0000 UTC" firstStartedPulling="2025-09-29 10:11:18.437380361 +0000 UTC m=+2510.418704265" lastFinishedPulling="2025-09-29 10:11:18.961016936 +0000 UTC m=+2510.942340830" observedRunningTime="2025-09-29 10:11:19.521530254 +0000 UTC m=+2511.502854168" watchObservedRunningTime="2025-09-29 10:11:19.525069067 +0000 UTC m=+2511.506392971" Sep 29 10:11:27 crc kubenswrapper[4779]: I0929 10:11:27.714966 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:11:27 crc kubenswrapper[4779]: E0929 10:11:27.715896 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:11:40 crc kubenswrapper[4779]: I0929 10:11:40.715003 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:11:40 crc kubenswrapper[4779]: E0929 10:11:40.715807 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:11:55 crc kubenswrapper[4779]: I0929 10:11:55.714176 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:11:55 crc kubenswrapper[4779]: E0929 10:11:55.715242 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" 
podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:12:10 crc kubenswrapper[4779]: I0929 10:12:10.714489 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:12:10 crc kubenswrapper[4779]: E0929 10:12:10.715524 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:12:24 crc kubenswrapper[4779]: I0929 10:12:24.715374 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:12:24 crc kubenswrapper[4779]: E0929 10:12:24.717322 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:12:35 crc kubenswrapper[4779]: I0929 10:12:35.273112 4779 generic.go:334] "Generic (PLEG): container finished" podID="4069654b-84b4-4049-ad65-a2414376655c" containerID="47a128604c9f928af9c6608906ad6b5afe56650c2a92c016bd7aa28952b3d55c" exitCode=0 Sep 29 10:12:35 crc kubenswrapper[4779]: I0929 10:12:35.273215 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" event={"ID":"4069654b-84b4-4049-ad65-a2414376655c","Type":"ContainerDied","Data":"47a128604c9f928af9c6608906ad6b5afe56650c2a92c016bd7aa28952b3d55c"} Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.750516 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.783931 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-inventory\") pod \"4069654b-84b4-4049-ad65-a2414376655c\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.784053 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ceph\") pod \"4069654b-84b4-4049-ad65-a2414376655c\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.784260 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ovn-combined-ca-bundle\") pod \"4069654b-84b4-4049-ad65-a2414376655c\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.784320 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np77s\" (UniqueName: \"kubernetes.io/projected/4069654b-84b4-4049-ad65-a2414376655c-kube-api-access-np77s\") pod \"4069654b-84b4-4049-ad65-a2414376655c\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.784369 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4069654b-84b4-4049-ad65-a2414376655c-ovncontroller-config-0\") pod \"4069654b-84b4-4049-ad65-a2414376655c\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.784525 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ssh-key\") pod \"4069654b-84b4-4049-ad65-a2414376655c\" (UID: \"4069654b-84b4-4049-ad65-a2414376655c\") " Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.790311 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ceph" (OuterVolumeSpecName: "ceph") pod "4069654b-84b4-4049-ad65-a2414376655c" (UID: "4069654b-84b4-4049-ad65-a2414376655c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.790840 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4069654b-84b4-4049-ad65-a2414376655c-kube-api-access-np77s" (OuterVolumeSpecName: "kube-api-access-np77s") pod "4069654b-84b4-4049-ad65-a2414376655c" (UID: "4069654b-84b4-4049-ad65-a2414376655c"). InnerVolumeSpecName "kube-api-access-np77s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.791546 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "4069654b-84b4-4049-ad65-a2414376655c" (UID: "4069654b-84b4-4049-ad65-a2414376655c"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.815806 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4069654b-84b4-4049-ad65-a2414376655c" (UID: "4069654b-84b4-4049-ad65-a2414376655c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.816357 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-inventory" (OuterVolumeSpecName: "inventory") pod "4069654b-84b4-4049-ad65-a2414376655c" (UID: "4069654b-84b4-4049-ad65-a2414376655c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.823885 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4069654b-84b4-4049-ad65-a2414376655c-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "4069654b-84b4-4049-ad65-a2414376655c" (UID: "4069654b-84b4-4049-ad65-a2414376655c"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.886814 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.886849 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.886860 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.886870 4779 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4069654b-84b4-4049-ad65-a2414376655c-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.886880 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np77s\" (UniqueName: \"kubernetes.io/projected/4069654b-84b4-4049-ad65-a2414376655c-kube-api-access-np77s\") on node \"crc\" DevicePath \"\"" Sep 29 10:12:36 crc kubenswrapper[4779]: I0929 10:12:36.886889 4779 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4069654b-84b4-4049-ad65-a2414376655c-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.296099 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" event={"ID":"4069654b-84b4-4049-ad65-a2414376655c","Type":"ContainerDied","Data":"ad7891d6e0065a20c511c0516e9e81adef7db7e1ca4e73896e67e0ac3f7c5d30"} Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.296144 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad7891d6e0065a20c511c0516e9e81adef7db7e1ca4e73896e67e0ac3f7c5d30" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 
10:12:37.296208 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bxslq" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.409220 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7"] Sep 29 10:12:37 crc kubenswrapper[4779]: E0929 10:12:37.409609 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4069654b-84b4-4049-ad65-a2414376655c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.409630 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4069654b-84b4-4049-ad65-a2414376655c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.409843 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4069654b-84b4-4049-ad65-a2414376655c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.410520 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.413508 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.413763 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.414116 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.415107 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.415648 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.415755 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.416232 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.436795 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7"] Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.499234 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.499306 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.499375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.499427 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7bcv\" (UniqueName: \"kubernetes.io/projected/f8a009ad-3b49-4843-8096-74a433d5d166-kube-api-access-s7bcv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.499659 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.499872 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.500078 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.601971 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.602072 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.602113 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.602154 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.602185 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.602238 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7bcv\" (UniqueName: \"kubernetes.io/projected/f8a009ad-3b49-4843-8096-74a433d5d166-kube-api-access-s7bcv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.602309 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.606920 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.607339 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.607582 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.608432 4779 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.610098 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.610524 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.632043 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7bcv\" (UniqueName: \"kubernetes.io/projected/f8a009ad-3b49-4843-8096-74a433d5d166-kube-api-access-s7bcv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.714326 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:12:37 crc kubenswrapper[4779]: E0929 10:12:37.714800 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:12:37 crc kubenswrapper[4779]: I0929 10:12:37.739200 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:12:38 crc kubenswrapper[4779]: I0929 10:12:38.284452 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7"] Sep 29 10:12:38 crc kubenswrapper[4779]: I0929 10:12:38.306395 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" event={"ID":"f8a009ad-3b49-4843-8096-74a433d5d166","Type":"ContainerStarted","Data":"0aba341dcc3a1901e99cb10d6697d30016e4c0155efc38703a0a5161c482ddfa"} Sep 29 10:12:39 crc kubenswrapper[4779]: I0929 10:12:39.319940 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" event={"ID":"f8a009ad-3b49-4843-8096-74a433d5d166","Type":"ContainerStarted","Data":"2219a3a58401888403318d40f91ab3bd0bdd6a2610a029bcf5b7f964ffef95fd"} Sep 29 10:12:39 crc kubenswrapper[4779]: I0929 10:12:39.341052 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" podStartSLOduration=1.810994167 podStartE2EDuration="2.341032158s" podCreationTimestamp="2025-09-29 10:12:37 +0000 UTC" firstStartedPulling="2025-09-29 10:12:38.287012612 +0000 UTC m=+2590.268336516" lastFinishedPulling="2025-09-29 10:12:38.817050603 +0000 UTC m=+2590.798374507" observedRunningTime="2025-09-29 10:12:39.334642973 +0000 UTC m=+2591.315966877" watchObservedRunningTime="2025-09-29 10:12:39.341032158 +0000 UTC m=+2591.322356062" Sep 29 10:12:48 crc kubenswrapper[4779]: I0929 10:12:48.721227 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:12:48 crc kubenswrapper[4779]: E0929 10:12:48.721956 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:13:00 crc kubenswrapper[4779]: I0929 10:13:00.715126 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:13:00 crc kubenswrapper[4779]: E0929 10:13:00.716131 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:13:14 crc kubenswrapper[4779]: I0929 10:13:14.714042 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:13:14 crc kubenswrapper[4779]: E0929 10:13:14.714961 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:13:27 crc kubenswrapper[4779]: I0929 10:13:27.714835 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:13:27 crc kubenswrapper[4779]: E0929 10:13:27.716011 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:13:38 crc kubenswrapper[4779]: I0929 10:13:38.722084 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:13:38 crc kubenswrapper[4779]: E0929 10:13:38.722896 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:13:38 crc kubenswrapper[4779]: I0929 10:13:38.933804 4779 generic.go:334] "Generic (PLEG): container finished" podID="f8a009ad-3b49-4843-8096-74a433d5d166" containerID="2219a3a58401888403318d40f91ab3bd0bdd6a2610a029bcf5b7f964ffef95fd" exitCode=0 Sep 29 10:13:38 crc kubenswrapper[4779]: I0929 10:13:38.933850 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" event={"ID":"f8a009ad-3b49-4843-8096-74a433d5d166","Type":"ContainerDied","Data":"2219a3a58401888403318d40f91ab3bd0bdd6a2610a029bcf5b7f964ffef95fd"} Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.318769 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.332002 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7bcv\" (UniqueName: \"kubernetes.io/projected/f8a009ad-3b49-4843-8096-74a433d5d166-kube-api-access-s7bcv\") pod \"f8a009ad-3b49-4843-8096-74a433d5d166\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.332044 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ceph\") pod \"f8a009ad-3b49-4843-8096-74a433d5d166\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.332096 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-ovn-metadata-agent-neutron-config-0\") pod \"f8a009ad-3b49-4843-8096-74a433d5d166\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.332177 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-inventory\") pod \"f8a009ad-3b49-4843-8096-74a433d5d166\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.332382 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-nova-metadata-neutron-config-0\") pod \"f8a009ad-3b49-4843-8096-74a433d5d166\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.332441 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-metadata-combined-ca-bundle\") pod \"f8a009ad-3b49-4843-8096-74a433d5d166\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.332483 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ssh-key\") pod \"f8a009ad-3b49-4843-8096-74a433d5d166\" (UID: \"f8a009ad-3b49-4843-8096-74a433d5d166\") " Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.339894 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ceph" (OuterVolumeSpecName: "ceph") pod "f8a009ad-3b49-4843-8096-74a433d5d166" (UID: "f8a009ad-3b49-4843-8096-74a433d5d166"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.349143 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f8a009ad-3b49-4843-8096-74a433d5d166" (UID: "f8a009ad-3b49-4843-8096-74a433d5d166"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.349983 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8a009ad-3b49-4843-8096-74a433d5d166-kube-api-access-s7bcv" (OuterVolumeSpecName: "kube-api-access-s7bcv") pod "f8a009ad-3b49-4843-8096-74a433d5d166" (UID: "f8a009ad-3b49-4843-8096-74a433d5d166"). InnerVolumeSpecName "kube-api-access-s7bcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.369233 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-inventory" (OuterVolumeSpecName: "inventory") pod "f8a009ad-3b49-4843-8096-74a433d5d166" (UID: "f8a009ad-3b49-4843-8096-74a433d5d166"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.372795 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "f8a009ad-3b49-4843-8096-74a433d5d166" (UID: "f8a009ad-3b49-4843-8096-74a433d5d166"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.373113 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "f8a009ad-3b49-4843-8096-74a433d5d166" (UID: "f8a009ad-3b49-4843-8096-74a433d5d166"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.385751 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f8a009ad-3b49-4843-8096-74a433d5d166" (UID: "f8a009ad-3b49-4843-8096-74a433d5d166"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.434492 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.434532 4779 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.434543 4779 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.434556 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.434565 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.434576 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7bcv\" (UniqueName: \"kubernetes.io/projected/f8a009ad-3b49-4843-8096-74a433d5d166-kube-api-access-s7bcv\") on node \"crc\" DevicePath \"\"" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.434585 4779 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f8a009ad-3b49-4843-8096-74a433d5d166-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.798079 4779 scope.go:117] "RemoveContainer" containerID="d51ce2175e51fab877734fe7293b719bd1c73ac4a4aba25c610c0d0da00935f4" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.827443 4779 scope.go:117] "RemoveContainer" containerID="38c44f4a44e2cae94fd3544422a3816426f06a2672d008a8cfd5c14604e9ba8d" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.849359 4779 scope.go:117] "RemoveContainer" containerID="8e7ef548ecd4c3b9444b89e5abdd670808c14aa246c4b4f66ab4686d461420ce" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.951180 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" event={"ID":"f8a009ad-3b49-4843-8096-74a433d5d166","Type":"ContainerDied","Data":"0aba341dcc3a1901e99cb10d6697d30016e4c0155efc38703a0a5161c482ddfa"} Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.951241 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0aba341dcc3a1901e99cb10d6697d30016e4c0155efc38703a0a5161c482ddfa" Sep 29 10:13:40 crc kubenswrapper[4779]: I0929 10:13:40.951337 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.060753 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw"] Sep 29 10:13:41 crc kubenswrapper[4779]: E0929 10:13:41.061574 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8a009ad-3b49-4843-8096-74a433d5d166" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.061596 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8a009ad-3b49-4843-8096-74a433d5d166" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.061859 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8a009ad-3b49-4843-8096-74a433d5d166" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.063149 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.065954 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.066009 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.066126 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.066232 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.066426 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.067427 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.129452 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw"] Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.148469 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6czcc\" (UniqueName: \"kubernetes.io/projected/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-kube-api-access-6czcc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.148525 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.148620 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.148642 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.148687 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.148707 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.249992 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6czcc\" (UniqueName: \"kubernetes.io/projected/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-kube-api-access-6czcc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.250062 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.250184 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.250214 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.250279 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: 
\"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.250309 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.258632 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.258701 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.258749 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.259101 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.259204 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.288691 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6czcc\" (UniqueName: \"kubernetes.io/projected/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-kube-api-access-6czcc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.424882 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.945701 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw"] Sep 29 10:13:41 crc kubenswrapper[4779]: I0929 10:13:41.963644 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" event={"ID":"b328d4c8-0d40-4d1c-ade1-469e292e6d0e","Type":"ContainerStarted","Data":"7900adf376174dd4d73d18a384a2c2539525d07464522c5addc8f5d76f74386a"} Sep 29 10:13:43 crc kubenswrapper[4779]: I0929 10:13:43.990325 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" event={"ID":"b328d4c8-0d40-4d1c-ade1-469e292e6d0e","Type":"ContainerStarted","Data":"d97de09f3756b10ed4aba840dd6d4c0df64172a543aa0a33139a5824237a2879"} Sep 29 10:13:44 crc kubenswrapper[4779]: I0929 10:13:44.016309 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" podStartSLOduration=2.274326296 podStartE2EDuration="3.016284304s" podCreationTimestamp="2025-09-29 10:13:41 +0000 UTC" firstStartedPulling="2025-09-29 10:13:41.9560971 +0000 UTC m=+2653.937421004" lastFinishedPulling="2025-09-29 10:13:42.698055098 +0000 UTC m=+2654.679379012" observedRunningTime="2025-09-29 10:13:44.010510596 +0000 UTC m=+2655.991834520" watchObservedRunningTime="2025-09-29 10:13:44.016284304 +0000 UTC m=+2655.997608218" Sep 29 10:13:50 crc kubenswrapper[4779]: I0929 10:13:50.714417 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:13:50 crc kubenswrapper[4779]: E0929 10:13:50.715184 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:14:05 crc kubenswrapper[4779]: I0929 10:14:05.714253 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:14:05 crc kubenswrapper[4779]: E0929 10:14:05.716335 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:14:20 crc kubenswrapper[4779]: I0929 10:14:20.715139 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:14:20 crc kubenswrapper[4779]: E0929 10:14:20.716136 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:14:21 crc kubenswrapper[4779]: I0929 10:14:21.974490 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bk5pv"] Sep 29 10:14:21 crc kubenswrapper[4779]: I0929 10:14:21.978128 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:21 crc kubenswrapper[4779]: I0929 10:14:21.988438 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bk5pv"] Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.079068 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p7bg\" (UniqueName: \"kubernetes.io/projected/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-kube-api-access-7p7bg\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.079221 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-catalog-content\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.079281 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-utilities\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.181391 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p7bg\" (UniqueName: \"kubernetes.io/projected/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-kube-api-access-7p7bg\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.181527 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-catalog-content\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.181591 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-utilities\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.181998 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-catalog-content\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.182075 4779 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-utilities\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.204839 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p7bg\" (UniqueName: \"kubernetes.io/projected/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-kube-api-access-7p7bg\") pod \"redhat-operators-bk5pv\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.308560 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:22 crc kubenswrapper[4779]: I0929 10:14:22.775253 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bk5pv"] Sep 29 10:14:23 crc kubenswrapper[4779]: I0929 10:14:23.432608 4779 generic.go:334] "Generic (PLEG): container finished" podID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerID="374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122" exitCode=0 Sep 29 10:14:23 crc kubenswrapper[4779]: I0929 10:14:23.432716 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bk5pv" event={"ID":"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586","Type":"ContainerDied","Data":"374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122"} Sep 29 10:14:23 crc kubenswrapper[4779]: I0929 10:14:23.433196 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bk5pv" event={"ID":"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586","Type":"ContainerStarted","Data":"be07e599a54c3c30d748fd01fbf598b681a19e43eadf5cf3e3ac779df4647173"} Sep 29 10:14:23 crc kubenswrapper[4779]: I0929 10:14:23.450515 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 10:14:25 crc kubenswrapper[4779]: I0929 10:14:25.468411 4779 generic.go:334] "Generic (PLEG): container finished" podID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerID="de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154" exitCode=0 Sep 29 10:14:25 crc kubenswrapper[4779]: I0929 10:14:25.468482 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bk5pv" event={"ID":"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586","Type":"ContainerDied","Data":"de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154"} Sep 29 10:14:26 crc kubenswrapper[4779]: I0929 10:14:26.485484 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bk5pv" event={"ID":"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586","Type":"ContainerStarted","Data":"f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1"} Sep 29 10:14:26 crc kubenswrapper[4779]: I0929 10:14:26.523844 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bk5pv" podStartSLOduration=3.088833498 podStartE2EDuration="5.523815931s" podCreationTimestamp="2025-09-29 10:14:21 +0000 UTC" firstStartedPulling="2025-09-29 10:14:23.450162019 +0000 UTC m=+2695.431485933" lastFinishedPulling="2025-09-29 10:14:25.885144432 +0000 UTC m=+2697.866468366" observedRunningTime="2025-09-29 10:14:26.509398221 +0000 UTC m=+2698.490722135" watchObservedRunningTime="2025-09-29 
10:14:26.523815931 +0000 UTC m=+2698.505139855" Sep 29 10:14:31 crc kubenswrapper[4779]: I0929 10:14:31.714708 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:14:31 crc kubenswrapper[4779]: E0929 10:14:31.715436 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:14:32 crc kubenswrapper[4779]: I0929 10:14:32.308754 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:32 crc kubenswrapper[4779]: I0929 10:14:32.309485 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:32 crc kubenswrapper[4779]: I0929 10:14:32.365695 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:32 crc kubenswrapper[4779]: I0929 10:14:32.646154 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:32 crc kubenswrapper[4779]: I0929 10:14:32.751897 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bk5pv"] Sep 29 10:14:34 crc kubenswrapper[4779]: I0929 10:14:34.591258 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bk5pv" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerName="registry-server" containerID="cri-o://f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1" gracePeriod=2 Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.077376 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.221501 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p7bg\" (UniqueName: \"kubernetes.io/projected/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-kube-api-access-7p7bg\") pod \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.221628 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-catalog-content\") pod \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.221812 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-utilities\") pod \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\" (UID: \"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586\") " Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.223102 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-utilities" (OuterVolumeSpecName: "utilities") pod "d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" (UID: "d9e4fef5-fc77-4120-8ef9-8cd7cb88b586"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.233553 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-kube-api-access-7p7bg" (OuterVolumeSpecName: "kube-api-access-7p7bg") pod "d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" (UID: "d9e4fef5-fc77-4120-8ef9-8cd7cb88b586"). InnerVolumeSpecName "kube-api-access-7p7bg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.324234 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.324284 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p7bg\" (UniqueName: \"kubernetes.io/projected/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-kube-api-access-7p7bg\") on node \"crc\" DevicePath \"\"" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.607060 4779 generic.go:334] "Generic (PLEG): container finished" podID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerID="f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1" exitCode=0 Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.607169 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bk5pv" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.607186 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bk5pv" event={"ID":"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586","Type":"ContainerDied","Data":"f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1"} Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.607664 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bk5pv" event={"ID":"d9e4fef5-fc77-4120-8ef9-8cd7cb88b586","Type":"ContainerDied","Data":"be07e599a54c3c30d748fd01fbf598b681a19e43eadf5cf3e3ac779df4647173"} Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.607878 4779 scope.go:117] "RemoveContainer" containerID="f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.641862 4779 scope.go:117] "RemoveContainer" containerID="de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.675828 4779 scope.go:117] "RemoveContainer" containerID="374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.715559 4779 scope.go:117] "RemoveContainer" containerID="f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1" Sep 29 10:14:35 crc kubenswrapper[4779]: E0929 10:14:35.716349 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1\": container with ID starting with f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1 not found: ID does not exist" containerID="f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.716398 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1"} err="failed to get container status \"f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1\": rpc error: code = NotFound desc = could not find container \"f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1\": container with ID starting with f48c1ed1d79e3156e2d8196e41e3fd7f5daec0795579b3968f2391e425d109f1 not found: ID does not exist" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.716427 4779 scope.go:117] "RemoveContainer" containerID="de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154" Sep 29 10:14:35 crc kubenswrapper[4779]: E0929 10:14:35.717044 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154\": container with ID starting with de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154 not found: ID does not exist" containerID="de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.717113 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154"} err="failed to get container status \"de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154\": rpc error: code = NotFound desc = could not find container 
\"de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154\": container with ID starting with de935851dd84747f0d17803e791295109975cc1b46221cb079af9e13d5629154 not found: ID does not exist" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.717157 4779 scope.go:117] "RemoveContainer" containerID="374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122" Sep 29 10:14:35 crc kubenswrapper[4779]: E0929 10:14:35.717550 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122\": container with ID starting with 374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122 not found: ID does not exist" containerID="374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122" Sep 29 10:14:35 crc kubenswrapper[4779]: I0929 10:14:35.717586 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122"} err="failed to get container status \"374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122\": rpc error: code = NotFound desc = could not find container \"374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122\": container with ID starting with 374d786c0844d8c1b559107a62641eddf9fb620e3b035d77c1772659f2819122 not found: ID does not exist" Sep 29 10:14:36 crc kubenswrapper[4779]: I0929 10:14:36.952125 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" (UID: "d9e4fef5-fc77-4120-8ef9-8cd7cb88b586"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:14:36 crc kubenswrapper[4779]: I0929 10:14:36.965107 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:14:37 crc kubenswrapper[4779]: I0929 10:14:37.169092 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bk5pv"] Sep 29 10:14:37 crc kubenswrapper[4779]: I0929 10:14:37.188083 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bk5pv"] Sep 29 10:14:38 crc kubenswrapper[4779]: I0929 10:14:38.732569 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" path="/var/lib/kubelet/pods/d9e4fef5-fc77-4120-8ef9-8cd7cb88b586/volumes" Sep 29 10:14:42 crc kubenswrapper[4779]: I0929 10:14:42.716053 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:14:42 crc kubenswrapper[4779]: E0929 10:14:42.717476 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.139745 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b2jw4"] Sep 29 10:14:49 crc kubenswrapper[4779]: E0929 10:14:49.140599 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerName="registry-server" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.140611 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerName="registry-server" Sep 29 10:14:49 crc kubenswrapper[4779]: E0929 10:14:49.140629 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerName="extract-utilities" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.140636 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerName="extract-utilities" Sep 29 10:14:49 crc kubenswrapper[4779]: E0929 10:14:49.140660 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerName="extract-content" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.140665 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerName="extract-content" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.140845 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9e4fef5-fc77-4120-8ef9-8cd7cb88b586" containerName="registry-server" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.142702 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.159622 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b2jw4"] Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.266065 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-utilities\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.266202 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-catalog-content\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.266271 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkvj4\" (UniqueName: \"kubernetes.io/projected/d54894d4-ce45-4397-8d72-d2f7bf37c197-kube-api-access-xkvj4\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.368093 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-catalog-content\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.368165 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkvj4\" (UniqueName: \"kubernetes.io/projected/d54894d4-ce45-4397-8d72-d2f7bf37c197-kube-api-access-xkvj4\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.368244 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-utilities\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.369317 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-catalog-content\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.369796 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-utilities\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.391847 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xkvj4\" (UniqueName: \"kubernetes.io/projected/d54894d4-ce45-4397-8d72-d2f7bf37c197-kube-api-access-xkvj4\") pod \"community-operators-b2jw4\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:49 crc kubenswrapper[4779]: I0929 10:14:49.467174 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:50 crc kubenswrapper[4779]: I0929 10:14:50.023648 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b2jw4"] Sep 29 10:14:50 crc kubenswrapper[4779]: I0929 10:14:50.805702 4779 generic.go:334] "Generic (PLEG): container finished" podID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerID="4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3" exitCode=0 Sep 29 10:14:50 crc kubenswrapper[4779]: I0929 10:14:50.805859 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2jw4" event={"ID":"d54894d4-ce45-4397-8d72-d2f7bf37c197","Type":"ContainerDied","Data":"4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3"} Sep 29 10:14:50 crc kubenswrapper[4779]: I0929 10:14:50.806366 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2jw4" event={"ID":"d54894d4-ce45-4397-8d72-d2f7bf37c197","Type":"ContainerStarted","Data":"887a2f77cfca55749cd7f20b27824cc53f3a294db4df87d2b52985e2ebb081d6"} Sep 29 10:14:51 crc kubenswrapper[4779]: I0929 10:14:51.820044 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2jw4" event={"ID":"d54894d4-ce45-4397-8d72-d2f7bf37c197","Type":"ContainerStarted","Data":"4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb"} Sep 29 10:14:52 crc kubenswrapper[4779]: I0929 10:14:52.834145 4779 generic.go:334] "Generic (PLEG): container finished" podID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerID="4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb" exitCode=0 Sep 29 10:14:52 crc kubenswrapper[4779]: I0929 10:14:52.834237 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2jw4" event={"ID":"d54894d4-ce45-4397-8d72-d2f7bf37c197","Type":"ContainerDied","Data":"4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb"} Sep 29 10:14:53 crc kubenswrapper[4779]: I0929 10:14:53.714295 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:14:53 crc kubenswrapper[4779]: I0929 10:14:53.848681 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2jw4" event={"ID":"d54894d4-ce45-4397-8d72-d2f7bf37c197","Type":"ContainerStarted","Data":"10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4"} Sep 29 10:14:53 crc kubenswrapper[4779]: I0929 10:14:53.876052 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b2jw4" podStartSLOduration=2.38140076 podStartE2EDuration="4.876023927s" podCreationTimestamp="2025-09-29 10:14:49 +0000 UTC" firstStartedPulling="2025-09-29 10:14:50.809088422 +0000 UTC m=+2722.790412336" lastFinishedPulling="2025-09-29 10:14:53.303711559 +0000 UTC m=+2725.285035503" observedRunningTime="2025-09-29 10:14:53.870213188 +0000 UTC m=+2725.851537102" 
watchObservedRunningTime="2025-09-29 10:14:53.876023927 +0000 UTC m=+2725.857347841" Sep 29 10:14:54 crc kubenswrapper[4779]: I0929 10:14:54.861519 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"9da53dcd1003940833ba3cee687f77a5c20c7a240309846ec86bc0f5cfe14e26"} Sep 29 10:14:59 crc kubenswrapper[4779]: I0929 10:14:59.468623 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:59 crc kubenswrapper[4779]: I0929 10:14:59.469445 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:59 crc kubenswrapper[4779]: I0929 10:14:59.565642 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:14:59 crc kubenswrapper[4779]: I0929 10:14:59.993716 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.058318 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b2jw4"] Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.165056 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv"] Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.167397 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.170640 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.171320 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.188000 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv"] Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.309316 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ca761fb-d727-4f81-8a0e-455b1699bfcc-secret-volume\") pod \"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.309361 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ca761fb-d727-4f81-8a0e-455b1699bfcc-config-volume\") pod \"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.309491 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjjm8\" (UniqueName: \"kubernetes.io/projected/2ca761fb-d727-4f81-8a0e-455b1699bfcc-kube-api-access-xjjm8\") pod 
\"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.411289 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjjm8\" (UniqueName: \"kubernetes.io/projected/2ca761fb-d727-4f81-8a0e-455b1699bfcc-kube-api-access-xjjm8\") pod \"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.411410 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ca761fb-d727-4f81-8a0e-455b1699bfcc-secret-volume\") pod \"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.411449 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ca761fb-d727-4f81-8a0e-455b1699bfcc-config-volume\") pod \"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.412733 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ca761fb-d727-4f81-8a0e-455b1699bfcc-config-volume\") pod \"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.420864 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ca761fb-d727-4f81-8a0e-455b1699bfcc-secret-volume\") pod \"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.431377 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjjm8\" (UniqueName: \"kubernetes.io/projected/2ca761fb-d727-4f81-8a0e-455b1699bfcc-kube-api-access-xjjm8\") pod \"collect-profiles-29319015-4ptkv\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.511002 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:00 crc kubenswrapper[4779]: I0929 10:15:00.999132 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv"] Sep 29 10:15:01 crc kubenswrapper[4779]: I0929 10:15:01.952500 4779 generic.go:334] "Generic (PLEG): container finished" podID="2ca761fb-d727-4f81-8a0e-455b1699bfcc" containerID="385d608e28994a585b53fb8931ce09d66a7e1fcae3312b9ffa3a737b624247f3" exitCode=0 Sep 29 10:15:01 crc kubenswrapper[4779]: I0929 10:15:01.952601 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" event={"ID":"2ca761fb-d727-4f81-8a0e-455b1699bfcc","Type":"ContainerDied","Data":"385d608e28994a585b53fb8931ce09d66a7e1fcae3312b9ffa3a737b624247f3"} Sep 29 10:15:01 crc kubenswrapper[4779]: I0929 10:15:01.952999 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" event={"ID":"2ca761fb-d727-4f81-8a0e-455b1699bfcc","Type":"ContainerStarted","Data":"1e49721aab634d6fb00ed51537f792f15b0ccef8a9210660923c7ad860ae0967"} Sep 29 10:15:01 crc kubenswrapper[4779]: I0929 10:15:01.953200 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b2jw4" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerName="registry-server" containerID="cri-o://10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4" gracePeriod=2 Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.500824 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.553752 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-utilities\") pod \"d54894d4-ce45-4397-8d72-d2f7bf37c197\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.553953 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkvj4\" (UniqueName: \"kubernetes.io/projected/d54894d4-ce45-4397-8d72-d2f7bf37c197-kube-api-access-xkvj4\") pod \"d54894d4-ce45-4397-8d72-d2f7bf37c197\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.554243 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-catalog-content\") pod \"d54894d4-ce45-4397-8d72-d2f7bf37c197\" (UID: \"d54894d4-ce45-4397-8d72-d2f7bf37c197\") " Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.555287 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-utilities" (OuterVolumeSpecName: "utilities") pod "d54894d4-ce45-4397-8d72-d2f7bf37c197" (UID: "d54894d4-ce45-4397-8d72-d2f7bf37c197"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.562058 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d54894d4-ce45-4397-8d72-d2f7bf37c197-kube-api-access-xkvj4" (OuterVolumeSpecName: "kube-api-access-xkvj4") pod "d54894d4-ce45-4397-8d72-d2f7bf37c197" (UID: "d54894d4-ce45-4397-8d72-d2f7bf37c197"). InnerVolumeSpecName "kube-api-access-xkvj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.618364 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d54894d4-ce45-4397-8d72-d2f7bf37c197" (UID: "d54894d4-ce45-4397-8d72-d2f7bf37c197"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.657607 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.657642 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54894d4-ce45-4397-8d72-d2f7bf37c197-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.657655 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkvj4\" (UniqueName: \"kubernetes.io/projected/d54894d4-ce45-4397-8d72-d2f7bf37c197-kube-api-access-xkvj4\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.971039 4779 generic.go:334] "Generic (PLEG): container finished" podID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerID="10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4" exitCode=0 Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.971282 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2jw4" event={"ID":"d54894d4-ce45-4397-8d72-d2f7bf37c197","Type":"ContainerDied","Data":"10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4"} Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.971321 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2jw4" event={"ID":"d54894d4-ce45-4397-8d72-d2f7bf37c197","Type":"ContainerDied","Data":"887a2f77cfca55749cd7f20b27824cc53f3a294db4df87d2b52985e2ebb081d6"} Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.971276 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b2jw4" Sep 29 10:15:02 crc kubenswrapper[4779]: I0929 10:15:02.971345 4779 scope.go:117] "RemoveContainer" containerID="10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.007356 4779 scope.go:117] "RemoveContainer" containerID="4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.012847 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b2jw4"] Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.023429 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b2jw4"] Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.039469 4779 scope.go:117] "RemoveContainer" containerID="4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.107255 4779 scope.go:117] "RemoveContainer" containerID="10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4" Sep 29 10:15:03 crc kubenswrapper[4779]: E0929 10:15:03.108469 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4\": container with ID starting with 10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4 not found: ID does not exist" containerID="10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.108510 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4"} err="failed to get container status \"10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4\": rpc error: code = NotFound desc = could not find container \"10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4\": container with ID starting with 10ec6a4df8c766d835ff5bca2c575ef344364abd894df40610acef6f18e260d4 not found: ID does not exist" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.108538 4779 scope.go:117] "RemoveContainer" containerID="4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb" Sep 29 10:15:03 crc kubenswrapper[4779]: E0929 10:15:03.109169 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb\": container with ID starting with 4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb not found: ID does not exist" containerID="4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.109226 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb"} err="failed to get container status \"4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb\": rpc error: code = NotFound desc = could not find container \"4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb\": container with ID starting with 4e440e4999d1d77071b166d839504bf42e1ba5ef939f7588b92df028dcf89aeb not found: ID does not exist" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.109269 4779 scope.go:117] "RemoveContainer" 
containerID="4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3" Sep 29 10:15:03 crc kubenswrapper[4779]: E0929 10:15:03.109639 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3\": container with ID starting with 4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3 not found: ID does not exist" containerID="4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.109680 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3"} err="failed to get container status \"4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3\": rpc error: code = NotFound desc = could not find container \"4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3\": container with ID starting with 4f0e8e8ee2b3e2521bd28c55fd13e05402bfc12d4974458eb9073ade470b8fc3 not found: ID does not exist" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.343558 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.492614 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjjm8\" (UniqueName: \"kubernetes.io/projected/2ca761fb-d727-4f81-8a0e-455b1699bfcc-kube-api-access-xjjm8\") pod \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.493466 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ca761fb-d727-4f81-8a0e-455b1699bfcc-secret-volume\") pod \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.493674 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ca761fb-d727-4f81-8a0e-455b1699bfcc-config-volume\") pod \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\" (UID: \"2ca761fb-d727-4f81-8a0e-455b1699bfcc\") " Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.494181 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ca761fb-d727-4f81-8a0e-455b1699bfcc-config-volume" (OuterVolumeSpecName: "config-volume") pod "2ca761fb-d727-4f81-8a0e-455b1699bfcc" (UID: "2ca761fb-d727-4f81-8a0e-455b1699bfcc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.503609 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ca761fb-d727-4f81-8a0e-455b1699bfcc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2ca761fb-d727-4f81-8a0e-455b1699bfcc" (UID: "2ca761fb-d727-4f81-8a0e-455b1699bfcc"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.503963 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ca761fb-d727-4f81-8a0e-455b1699bfcc-kube-api-access-xjjm8" (OuterVolumeSpecName: "kube-api-access-xjjm8") pod "2ca761fb-d727-4f81-8a0e-455b1699bfcc" (UID: "2ca761fb-d727-4f81-8a0e-455b1699bfcc"). InnerVolumeSpecName "kube-api-access-xjjm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.597401 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2ca761fb-d727-4f81-8a0e-455b1699bfcc-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.597744 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2ca761fb-d727-4f81-8a0e-455b1699bfcc-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.597839 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjjm8\" (UniqueName: \"kubernetes.io/projected/2ca761fb-d727-4f81-8a0e-455b1699bfcc-kube-api-access-xjjm8\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.987100 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.987088 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv" event={"ID":"2ca761fb-d727-4f81-8a0e-455b1699bfcc","Type":"ContainerDied","Data":"1e49721aab634d6fb00ed51537f792f15b0ccef8a9210660923c7ad860ae0967"} Sep 29 10:15:03 crc kubenswrapper[4779]: I0929 10:15:03.987318 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e49721aab634d6fb00ed51537f792f15b0ccef8a9210660923c7ad860ae0967" Sep 29 10:15:04 crc kubenswrapper[4779]: I0929 10:15:04.456121 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"] Sep 29 10:15:04 crc kubenswrapper[4779]: I0929 10:15:04.464122 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318970-52fch"] Sep 29 10:15:04 crc kubenswrapper[4779]: I0929 10:15:04.733121 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbb3233c-65ea-4bb4-aaa5-1d80fef4638d" path="/var/lib/kubelet/pods/bbb3233c-65ea-4bb4-aaa5-1d80fef4638d/volumes" Sep 29 10:15:04 crc kubenswrapper[4779]: I0929 10:15:04.734447 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" path="/var/lib/kubelet/pods/d54894d4-ce45-4397-8d72-d2f7bf37c197/volumes" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.146369 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8tdqf"] Sep 29 10:15:32 crc kubenswrapper[4779]: E0929 10:15:32.147570 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ca761fb-d727-4f81-8a0e-455b1699bfcc" containerName="collect-profiles" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.147586 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ca761fb-d727-4f81-8a0e-455b1699bfcc" 
containerName="collect-profiles" Sep 29 10:15:32 crc kubenswrapper[4779]: E0929 10:15:32.147611 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerName="extract-utilities" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.147619 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerName="extract-utilities" Sep 29 10:15:32 crc kubenswrapper[4779]: E0929 10:15:32.147643 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerName="registry-server" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.147653 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerName="registry-server" Sep 29 10:15:32 crc kubenswrapper[4779]: E0929 10:15:32.147679 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerName="extract-content" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.147686 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerName="extract-content" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.147889 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ca761fb-d727-4f81-8a0e-455b1699bfcc" containerName="collect-profiles" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.147935 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d54894d4-ce45-4397-8d72-d2f7bf37c197" containerName="registry-server" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.149637 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.171994 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8tdqf"] Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.219581 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-utilities\") pod \"certified-operators-8tdqf\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.219677 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7q9l\" (UniqueName: \"kubernetes.io/projected/98542bb6-032f-4020-943b-95f6be851a71-kube-api-access-g7q9l\") pod \"certified-operators-8tdqf\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.219831 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-catalog-content\") pod \"certified-operators-8tdqf\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.322809 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-catalog-content\") pod \"certified-operators-8tdqf\" (UID: 
\"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.322889 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-utilities\") pod \"certified-operators-8tdqf\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.322983 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7q9l\" (UniqueName: \"kubernetes.io/projected/98542bb6-032f-4020-943b-95f6be851a71-kube-api-access-g7q9l\") pod \"certified-operators-8tdqf\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.323324 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-catalog-content\") pod \"certified-operators-8tdqf\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.323442 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-utilities\") pod \"certified-operators-8tdqf\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.346591 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7q9l\" (UniqueName: \"kubernetes.io/projected/98542bb6-032f-4020-943b-95f6be851a71-kube-api-access-g7q9l\") pod \"certified-operators-8tdqf\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:32 crc kubenswrapper[4779]: I0929 10:15:32.471722 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:33 crc kubenswrapper[4779]: I0929 10:15:33.021575 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8tdqf"] Sep 29 10:15:33 crc kubenswrapper[4779]: I0929 10:15:33.334269 4779 generic.go:334] "Generic (PLEG): container finished" podID="98542bb6-032f-4020-943b-95f6be851a71" containerID="1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f" exitCode=0 Sep 29 10:15:33 crc kubenswrapper[4779]: I0929 10:15:33.334709 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tdqf" event={"ID":"98542bb6-032f-4020-943b-95f6be851a71","Type":"ContainerDied","Data":"1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f"} Sep 29 10:15:33 crc kubenswrapper[4779]: I0929 10:15:33.334745 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tdqf" event={"ID":"98542bb6-032f-4020-943b-95f6be851a71","Type":"ContainerStarted","Data":"3f850f6ea595a448cc51197d1ca51c92caf433607e7bcfcdb5f895e33598460b"} Sep 29 10:15:34 crc kubenswrapper[4779]: I0929 10:15:34.351818 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tdqf" event={"ID":"98542bb6-032f-4020-943b-95f6be851a71","Type":"ContainerStarted","Data":"ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c"} Sep 29 10:15:35 crc kubenswrapper[4779]: I0929 10:15:35.368846 4779 generic.go:334] "Generic (PLEG): container finished" podID="98542bb6-032f-4020-943b-95f6be851a71" containerID="ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c" exitCode=0 Sep 29 10:15:35 crc kubenswrapper[4779]: I0929 10:15:35.369051 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tdqf" event={"ID":"98542bb6-032f-4020-943b-95f6be851a71","Type":"ContainerDied","Data":"ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c"} Sep 29 10:15:37 crc kubenswrapper[4779]: I0929 10:15:37.400381 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tdqf" event={"ID":"98542bb6-032f-4020-943b-95f6be851a71","Type":"ContainerStarted","Data":"9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25"} Sep 29 10:15:37 crc kubenswrapper[4779]: I0929 10:15:37.423708 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8tdqf" podStartSLOduration=2.497328695 podStartE2EDuration="5.42367712s" podCreationTimestamp="2025-09-29 10:15:32 +0000 UTC" firstStartedPulling="2025-09-29 10:15:33.338018411 +0000 UTC m=+2765.319342315" lastFinishedPulling="2025-09-29 10:15:36.264366836 +0000 UTC m=+2768.245690740" observedRunningTime="2025-09-29 10:15:37.423361441 +0000 UTC m=+2769.404685345" watchObservedRunningTime="2025-09-29 10:15:37.42367712 +0000 UTC m=+2769.405001034" Sep 29 10:15:40 crc kubenswrapper[4779]: I0929 10:15:40.948428 4779 scope.go:117] "RemoveContainer" containerID="73b98a46386d9723b0f19c6b761bac75c7313cf4fbae6ab2a6d855c16a45a1c5" Sep 29 10:15:42 crc kubenswrapper[4779]: I0929 10:15:42.472103 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:42 crc kubenswrapper[4779]: I0929 10:15:42.472549 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:42 crc kubenswrapper[4779]: I0929 10:15:42.563131 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:43 crc kubenswrapper[4779]: I0929 10:15:43.524448 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:43 crc kubenswrapper[4779]: I0929 10:15:43.606342 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8tdqf"] Sep 29 10:15:45 crc kubenswrapper[4779]: I0929 10:15:45.485264 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8tdqf" podUID="98542bb6-032f-4020-943b-95f6be851a71" containerName="registry-server" containerID="cri-o://9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25" gracePeriod=2 Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.021990 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.071253 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7q9l\" (UniqueName: \"kubernetes.io/projected/98542bb6-032f-4020-943b-95f6be851a71-kube-api-access-g7q9l\") pod \"98542bb6-032f-4020-943b-95f6be851a71\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.071433 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-catalog-content\") pod \"98542bb6-032f-4020-943b-95f6be851a71\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.071541 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-utilities\") pod \"98542bb6-032f-4020-943b-95f6be851a71\" (UID: \"98542bb6-032f-4020-943b-95f6be851a71\") " Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.073188 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-utilities" (OuterVolumeSpecName: "utilities") pod "98542bb6-032f-4020-943b-95f6be851a71" (UID: "98542bb6-032f-4020-943b-95f6be851a71"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.084212 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98542bb6-032f-4020-943b-95f6be851a71-kube-api-access-g7q9l" (OuterVolumeSpecName: "kube-api-access-g7q9l") pod "98542bb6-032f-4020-943b-95f6be851a71" (UID: "98542bb6-032f-4020-943b-95f6be851a71"). InnerVolumeSpecName "kube-api-access-g7q9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.132327 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "98542bb6-032f-4020-943b-95f6be851a71" (UID: "98542bb6-032f-4020-943b-95f6be851a71"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.174071 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.174098 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7q9l\" (UniqueName: \"kubernetes.io/projected/98542bb6-032f-4020-943b-95f6be851a71-kube-api-access-g7q9l\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.174107 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98542bb6-032f-4020-943b-95f6be851a71-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.504267 4779 generic.go:334] "Generic (PLEG): container finished" podID="98542bb6-032f-4020-943b-95f6be851a71" containerID="9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25" exitCode=0 Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.504368 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tdqf" event={"ID":"98542bb6-032f-4020-943b-95f6be851a71","Type":"ContainerDied","Data":"9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25"} Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.504402 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tdqf" event={"ID":"98542bb6-032f-4020-943b-95f6be851a71","Type":"ContainerDied","Data":"3f850f6ea595a448cc51197d1ca51c92caf433607e7bcfcdb5f895e33598460b"} Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.504402 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8tdqf" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.504423 4779 scope.go:117] "RemoveContainer" containerID="9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.563596 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8tdqf"] Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.574300 4779 scope.go:117] "RemoveContainer" containerID="ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.576577 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8tdqf"] Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.617556 4779 scope.go:117] "RemoveContainer" containerID="1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.662176 4779 scope.go:117] "RemoveContainer" containerID="9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25" Sep 29 10:15:46 crc kubenswrapper[4779]: E0929 10:15:46.662730 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25\": container with ID starting with 9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25 not found: ID does not exist" containerID="9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.662785 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25"} err="failed to get container status \"9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25\": rpc error: code = NotFound desc = could not find container \"9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25\": container with ID starting with 9b7274296d325a72ecd0b922b8c7a0e36aebe319b82b66fea110fa8280908f25 not found: ID does not exist" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.662817 4779 scope.go:117] "RemoveContainer" containerID="ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c" Sep 29 10:15:46 crc kubenswrapper[4779]: E0929 10:15:46.663448 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c\": container with ID starting with ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c not found: ID does not exist" containerID="ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.663480 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c"} err="failed to get container status \"ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c\": rpc error: code = NotFound desc = could not find container \"ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c\": container with ID starting with ab71da551c37e850a618a1665302c32334864006e9c19475296bed9e25b1b38c not found: ID does not exist" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.663508 4779 scope.go:117] "RemoveContainer" 
containerID="1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f" Sep 29 10:15:46 crc kubenswrapper[4779]: E0929 10:15:46.664000 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f\": container with ID starting with 1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f not found: ID does not exist" containerID="1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.664121 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f"} err="failed to get container status \"1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f\": rpc error: code = NotFound desc = could not find container \"1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f\": container with ID starting with 1a4b2e28ca0f6885f6e73df30b252b0be0ec9e888fc0af9a1e2fc2d14934a85f not found: ID does not exist" Sep 29 10:15:46 crc kubenswrapper[4779]: I0929 10:15:46.729134 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98542bb6-032f-4020-943b-95f6be851a71" path="/var/lib/kubelet/pods/98542bb6-032f-4020-943b-95f6be851a71/volumes" Sep 29 10:17:16 crc kubenswrapper[4779]: I0929 10:17:16.966024 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:17:16 crc kubenswrapper[4779]: I0929 10:17:16.966649 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:17:46 crc kubenswrapper[4779]: I0929 10:17:46.967165 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:17:46 crc kubenswrapper[4779]: I0929 10:17:46.967974 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:18:03 crc kubenswrapper[4779]: I0929 10:18:03.943592 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pdkn4"] Sep 29 10:18:03 crc kubenswrapper[4779]: E0929 10:18:03.945147 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98542bb6-032f-4020-943b-95f6be851a71" containerName="registry-server" Sep 29 10:18:03 crc kubenswrapper[4779]: I0929 10:18:03.945173 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="98542bb6-032f-4020-943b-95f6be851a71" containerName="registry-server" Sep 29 10:18:03 crc kubenswrapper[4779]: E0929 10:18:03.945205 4779 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="98542bb6-032f-4020-943b-95f6be851a71" containerName="extract-utilities" Sep 29 10:18:03 crc kubenswrapper[4779]: I0929 10:18:03.945215 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="98542bb6-032f-4020-943b-95f6be851a71" containerName="extract-utilities" Sep 29 10:18:03 crc kubenswrapper[4779]: E0929 10:18:03.945239 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98542bb6-032f-4020-943b-95f6be851a71" containerName="extract-content" Sep 29 10:18:03 crc kubenswrapper[4779]: I0929 10:18:03.945249 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="98542bb6-032f-4020-943b-95f6be851a71" containerName="extract-content" Sep 29 10:18:03 crc kubenswrapper[4779]: I0929 10:18:03.945586 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="98542bb6-032f-4020-943b-95f6be851a71" containerName="registry-server" Sep 29 10:18:03 crc kubenswrapper[4779]: I0929 10:18:03.948312 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:03 crc kubenswrapper[4779]: I0929 10:18:03.957938 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pdkn4"] Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.013129 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-catalog-content\") pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.013977 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4xxw\" (UniqueName: \"kubernetes.io/projected/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-kube-api-access-f4xxw\") pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.014227 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-utilities\") pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.116965 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-catalog-content\") pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.117376 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4xxw\" (UniqueName: \"kubernetes.io/projected/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-kube-api-access-f4xxw\") pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.117501 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-catalog-content\") 
pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.117893 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-utilities\") pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.118310 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-utilities\") pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.138120 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4xxw\" (UniqueName: \"kubernetes.io/projected/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-kube-api-access-f4xxw\") pod \"redhat-marketplace-pdkn4\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.280954 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:04 crc kubenswrapper[4779]: I0929 10:18:04.806165 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pdkn4"] Sep 29 10:18:04 crc kubenswrapper[4779]: W0929 10:18:04.822761 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f15c93d_9d62_4a71_82bf_d198e3bbefc6.slice/crio-3396449248a15041787bda7b5c664bcec01315b913b75a52c8c5ab5be6a9ecb9 WatchSource:0}: Error finding container 3396449248a15041787bda7b5c664bcec01315b913b75a52c8c5ab5be6a9ecb9: Status 404 returned error can't find the container with id 3396449248a15041787bda7b5c664bcec01315b913b75a52c8c5ab5be6a9ecb9 Sep 29 10:18:05 crc kubenswrapper[4779]: I0929 10:18:05.145498 4779 generic.go:334] "Generic (PLEG): container finished" podID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerID="7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9" exitCode=0 Sep 29 10:18:05 crc kubenswrapper[4779]: I0929 10:18:05.145555 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pdkn4" event={"ID":"3f15c93d-9d62-4a71-82bf-d198e3bbefc6","Type":"ContainerDied","Data":"7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9"} Sep 29 10:18:05 crc kubenswrapper[4779]: I0929 10:18:05.145980 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pdkn4" event={"ID":"3f15c93d-9d62-4a71-82bf-d198e3bbefc6","Type":"ContainerStarted","Data":"3396449248a15041787bda7b5c664bcec01315b913b75a52c8c5ab5be6a9ecb9"} Sep 29 10:18:06 crc kubenswrapper[4779]: I0929 10:18:06.159117 4779 generic.go:334] "Generic (PLEG): container finished" podID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerID="e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41" exitCode=0 Sep 29 10:18:06 crc kubenswrapper[4779]: I0929 10:18:06.159205 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pdkn4" 
event={"ID":"3f15c93d-9d62-4a71-82bf-d198e3bbefc6","Type":"ContainerDied","Data":"e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41"} Sep 29 10:18:07 crc kubenswrapper[4779]: I0929 10:18:07.171140 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pdkn4" event={"ID":"3f15c93d-9d62-4a71-82bf-d198e3bbefc6","Type":"ContainerStarted","Data":"b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887"} Sep 29 10:18:07 crc kubenswrapper[4779]: I0929 10:18:07.191885 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pdkn4" podStartSLOduration=2.528540908 podStartE2EDuration="4.191864081s" podCreationTimestamp="2025-09-29 10:18:03 +0000 UTC" firstStartedPulling="2025-09-29 10:18:05.147600118 +0000 UTC m=+2917.128924032" lastFinishedPulling="2025-09-29 10:18:06.810923301 +0000 UTC m=+2918.792247205" observedRunningTime="2025-09-29 10:18:07.189585804 +0000 UTC m=+2919.170909718" watchObservedRunningTime="2025-09-29 10:18:07.191864081 +0000 UTC m=+2919.173187985" Sep 29 10:18:14 crc kubenswrapper[4779]: I0929 10:18:14.282285 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:14 crc kubenswrapper[4779]: I0929 10:18:14.283178 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:14 crc kubenswrapper[4779]: I0929 10:18:14.357474 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:15 crc kubenswrapper[4779]: I0929 10:18:15.336916 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:15 crc kubenswrapper[4779]: I0929 10:18:15.407298 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pdkn4"] Sep 29 10:18:16 crc kubenswrapper[4779]: I0929 10:18:16.966771 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:18:16 crc kubenswrapper[4779]: I0929 10:18:16.967205 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:18:16 crc kubenswrapper[4779]: I0929 10:18:16.967257 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 10:18:16 crc kubenswrapper[4779]: I0929 10:18:16.968248 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9da53dcd1003940833ba3cee687f77a5c20c7a240309846ec86bc0f5cfe14e26"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 10:18:16 crc kubenswrapper[4779]: I0929 10:18:16.968343 4779 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://9da53dcd1003940833ba3cee687f77a5c20c7a240309846ec86bc0f5cfe14e26" gracePeriod=600 Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.287612 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="9da53dcd1003940833ba3cee687f77a5c20c7a240309846ec86bc0f5cfe14e26" exitCode=0 Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.287693 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"9da53dcd1003940833ba3cee687f77a5c20c7a240309846ec86bc0f5cfe14e26"} Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.288144 4779 scope.go:117] "RemoveContainer" containerID="6c3b4d9e1288a1d587469d8a17efdcacdfb592049e65b194a12258479174566e" Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.288265 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pdkn4" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerName="registry-server" containerID="cri-o://b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887" gracePeriod=2 Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.868520 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.954618 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-utilities\") pod \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.954770 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4xxw\" (UniqueName: \"kubernetes.io/projected/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-kube-api-access-f4xxw\") pod \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.954855 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-catalog-content\") pod \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\" (UID: \"3f15c93d-9d62-4a71-82bf-d198e3bbefc6\") " Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.955522 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-utilities" (OuterVolumeSpecName: "utilities") pod "3f15c93d-9d62-4a71-82bf-d198e3bbefc6" (UID: "3f15c93d-9d62-4a71-82bf-d198e3bbefc6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.962004 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-kube-api-access-f4xxw" (OuterVolumeSpecName: "kube-api-access-f4xxw") pod "3f15c93d-9d62-4a71-82bf-d198e3bbefc6" (UID: "3f15c93d-9d62-4a71-82bf-d198e3bbefc6"). InnerVolumeSpecName "kube-api-access-f4xxw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:18:17 crc kubenswrapper[4779]: I0929 10:18:17.970234 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f15c93d-9d62-4a71-82bf-d198e3bbefc6" (UID: "3f15c93d-9d62-4a71-82bf-d198e3bbefc6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.057805 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.057860 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4xxw\" (UniqueName: \"kubernetes.io/projected/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-kube-api-access-f4xxw\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.057877 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f15c93d-9d62-4a71-82bf-d198e3bbefc6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.303282 4779 generic.go:334] "Generic (PLEG): container finished" podID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerID="b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887" exitCode=0 Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.303354 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pdkn4" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.303336 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pdkn4" event={"ID":"3f15c93d-9d62-4a71-82bf-d198e3bbefc6","Type":"ContainerDied","Data":"b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887"} Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.303409 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pdkn4" event={"ID":"3f15c93d-9d62-4a71-82bf-d198e3bbefc6","Type":"ContainerDied","Data":"3396449248a15041787bda7b5c664bcec01315b913b75a52c8c5ab5be6a9ecb9"} Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.303452 4779 scope.go:117] "RemoveContainer" containerID="b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.306867 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"} Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.367944 4779 scope.go:117] "RemoveContainer" containerID="e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.372059 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pdkn4"] Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.397643 4779 scope.go:117] "RemoveContainer" containerID="7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.405011 4779 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-marketplace/redhat-marketplace-pdkn4"] Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.445130 4779 scope.go:117] "RemoveContainer" containerID="b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887" Sep 29 10:18:18 crc kubenswrapper[4779]: E0929 10:18:18.445998 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887\": container with ID starting with b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887 not found: ID does not exist" containerID="b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.446065 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887"} err="failed to get container status \"b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887\": rpc error: code = NotFound desc = could not find container \"b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887\": container with ID starting with b4d62cf3012f6b6caaa121ba3010b63b163ab3784fc2589b75d81f91fbcaf887 not found: ID does not exist" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.446107 4779 scope.go:117] "RemoveContainer" containerID="e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41" Sep 29 10:18:18 crc kubenswrapper[4779]: E0929 10:18:18.446516 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41\": container with ID starting with e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41 not found: ID does not exist" containerID="e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.446548 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41"} err="failed to get container status \"e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41\": rpc error: code = NotFound desc = could not find container \"e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41\": container with ID starting with e4ca3977ee7da252a2b461de1de5125589233b5bb8c9032aefeadc3aadb10f41 not found: ID does not exist" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.446568 4779 scope.go:117] "RemoveContainer" containerID="7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9" Sep 29 10:18:18 crc kubenswrapper[4779]: E0929 10:18:18.446896 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9\": container with ID starting with 7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9 not found: ID does not exist" containerID="7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.446994 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9"} err="failed to get container status \"7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9\": rpc error: code = NotFound 
desc = could not find container \"7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9\": container with ID starting with 7e94feeaf09eb3e23db0815db4ab185b773bbe4005cd287dad1a5d3b803eb0f9 not found: ID does not exist" Sep 29 10:18:18 crc kubenswrapper[4779]: I0929 10:18:18.729522 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" path="/var/lib/kubelet/pods/3f15c93d-9d62-4a71-82bf-d198e3bbefc6/volumes" Sep 29 10:18:32 crc kubenswrapper[4779]: I0929 10:18:32.471255 4779 generic.go:334] "Generic (PLEG): container finished" podID="b328d4c8-0d40-4d1c-ade1-469e292e6d0e" containerID="d97de09f3756b10ed4aba840dd6d4c0df64172a543aa0a33139a5824237a2879" exitCode=0 Sep 29 10:18:32 crc kubenswrapper[4779]: I0929 10:18:32.471360 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" event={"ID":"b328d4c8-0d40-4d1c-ade1-469e292e6d0e","Type":"ContainerDied","Data":"d97de09f3756b10ed4aba840dd6d4c0df64172a543aa0a33139a5824237a2879"} Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.115774 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.184424 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6czcc\" (UniqueName: \"kubernetes.io/projected/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-kube-api-access-6czcc\") pod \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.184720 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-secret-0\") pod \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.184820 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ceph\") pod \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.184977 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ssh-key\") pod \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.185056 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-inventory\") pod \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.185171 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-combined-ca-bundle\") pod \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\" (UID: \"b328d4c8-0d40-4d1c-ade1-469e292e6d0e\") " Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.194027 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-kube-api-access-6czcc" (OuterVolumeSpecName: "kube-api-access-6czcc") pod "b328d4c8-0d40-4d1c-ade1-469e292e6d0e" (UID: "b328d4c8-0d40-4d1c-ade1-469e292e6d0e"). InnerVolumeSpecName "kube-api-access-6czcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.194176 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "b328d4c8-0d40-4d1c-ade1-469e292e6d0e" (UID: "b328d4c8-0d40-4d1c-ade1-469e292e6d0e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.196788 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ceph" (OuterVolumeSpecName: "ceph") pod "b328d4c8-0d40-4d1c-ade1-469e292e6d0e" (UID: "b328d4c8-0d40-4d1c-ade1-469e292e6d0e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.236466 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "b328d4c8-0d40-4d1c-ade1-469e292e6d0e" (UID: "b328d4c8-0d40-4d1c-ade1-469e292e6d0e"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.238356 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-inventory" (OuterVolumeSpecName: "inventory") pod "b328d4c8-0d40-4d1c-ade1-469e292e6d0e" (UID: "b328d4c8-0d40-4d1c-ade1-469e292e6d0e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.240434 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b328d4c8-0d40-4d1c-ade1-469e292e6d0e" (UID: "b328d4c8-0d40-4d1c-ade1-469e292e6d0e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.289538 4779 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.289593 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.289607 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.289617 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.289627 4779 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.289641 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6czcc\" (UniqueName: \"kubernetes.io/projected/b328d4c8-0d40-4d1c-ade1-469e292e6d0e-kube-api-access-6czcc\") on node \"crc\" DevicePath \"\"" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.498812 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" event={"ID":"b328d4c8-0d40-4d1c-ade1-469e292e6d0e","Type":"ContainerDied","Data":"7900adf376174dd4d73d18a384a2c2539525d07464522c5addc8f5d76f74386a"} Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.498863 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7900adf376174dd4d73d18a384a2c2539525d07464522c5addc8f5d76f74386a" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.498895 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.682053 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c"] Sep 29 10:18:34 crc kubenswrapper[4779]: E0929 10:18:34.682718 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerName="registry-server" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.682745 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerName="registry-server" Sep 29 10:18:34 crc kubenswrapper[4779]: E0929 10:18:34.682787 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b328d4c8-0d40-4d1c-ade1-469e292e6d0e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.682800 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="b328d4c8-0d40-4d1c-ade1-469e292e6d0e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 10:18:34 crc kubenswrapper[4779]: E0929 10:18:34.682816 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerName="extract-content" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.682825 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerName="extract-content" Sep 29 10:18:34 crc kubenswrapper[4779]: E0929 10:18:34.682839 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerName="extract-utilities" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.682847 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerName="extract-utilities" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.683140 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f15c93d-9d62-4a71-82bf-d198e3bbefc6" containerName="registry-server" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.683167 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="b328d4c8-0d40-4d1c-ade1-469e292e6d0e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.684460 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.687969 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.688807 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.688795 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.688914 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.689339 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.691417 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.692602 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.692628 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.693518 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.698753 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c"] Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799310 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799377 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799419 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799502 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-custom-ceph-combined-ca-bundle\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799596 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799614 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799651 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799714 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799768 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799801 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.799857 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2m4s\" (UniqueName: \"kubernetes.io/projected/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-kube-api-access-f2m4s\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc 
kubenswrapper[4779]: I0929 10:18:34.902193 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2m4s\" (UniqueName: \"kubernetes.io/projected/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-kube-api-access-f2m4s\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.902763 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.903523 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.903570 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.903729 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.904312 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.904344 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.904426 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: 
\"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.904507 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.904560 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.904568 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.904592 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.905563 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.908455 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.908522 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.909168 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-0\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.909335 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.909333 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.911532 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.912383 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.920635 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:34 crc kubenswrapper[4779]: I0929 10:18:34.924053 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2m4s\" (UniqueName: \"kubernetes.io/projected/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-kube-api-access-f2m4s\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:35 crc kubenswrapper[4779]: I0929 10:18:35.006564 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:18:35 crc kubenswrapper[4779]: I0929 10:18:35.591599 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c"] Sep 29 10:18:35 crc kubenswrapper[4779]: W0929 10:18:35.613405 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1c042f0_5d85_45db_bd0d_42a9ab1dfcdd.slice/crio-bbe9889112363b03c9e9f00b9d38b8fcd874cdfa0eb9530c74456253c51c64a1 WatchSource:0}: Error finding container bbe9889112363b03c9e9f00b9d38b8fcd874cdfa0eb9530c74456253c51c64a1: Status 404 returned error can't find the container with id bbe9889112363b03c9e9f00b9d38b8fcd874cdfa0eb9530c74456253c51c64a1 Sep 29 10:18:36 crc kubenswrapper[4779]: I0929 10:18:36.524392 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" event={"ID":"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd","Type":"ContainerStarted","Data":"76a66c07e14d5a26b0d4ecc205a4210fb4886174b75ce49c34a73fb9c7d19d27"} Sep 29 10:18:36 crc kubenswrapper[4779]: I0929 10:18:36.525032 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" event={"ID":"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd","Type":"ContainerStarted","Data":"bbe9889112363b03c9e9f00b9d38b8fcd874cdfa0eb9530c74456253c51c64a1"} Sep 29 10:18:36 crc kubenswrapper[4779]: I0929 10:18:36.545317 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" podStartSLOduration=2.078854995 podStartE2EDuration="2.545294391s" podCreationTimestamp="2025-09-29 10:18:34 +0000 UTC" firstStartedPulling="2025-09-29 10:18:35.616636723 +0000 UTC m=+2947.597960627" lastFinishedPulling="2025-09-29 10:18:36.083076109 +0000 UTC m=+2948.064400023" observedRunningTime="2025-09-29 10:18:36.543642573 +0000 UTC m=+2948.524966517" watchObservedRunningTime="2025-09-29 10:18:36.545294391 +0000 UTC m=+2948.526618295" Sep 29 10:20:46 crc kubenswrapper[4779]: I0929 10:20:46.966560 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:20:46 crc kubenswrapper[4779]: I0929 10:20:46.967406 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:21:16 crc kubenswrapper[4779]: I0929 10:21:16.966517 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:21:16 crc kubenswrapper[4779]: I0929 10:21:16.967456 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:21:46 crc kubenswrapper[4779]: I0929 10:21:46.966112 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:21:46 crc kubenswrapper[4779]: I0929 10:21:46.966980 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:21:46 crc kubenswrapper[4779]: I0929 10:21:46.967048 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 10:21:46 crc kubenswrapper[4779]: I0929 10:21:46.968062 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 10:21:46 crc kubenswrapper[4779]: I0929 10:21:46.968135 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" gracePeriod=600 Sep 29 10:21:47 crc kubenswrapper[4779]: E0929 10:21:47.099934 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:21:47 crc kubenswrapper[4779]: E0929 10:21:47.182272 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1a5d3a7_37d9_4a87_864c_e4af7f504a19.slice/crio-conmon-a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1a5d3a7_37d9_4a87_864c_e4af7f504a19.slice/crio-a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338.scope\": RecentStats: unable to find data in memory cache]" Sep 29 10:21:47 crc kubenswrapper[4779]: I0929 10:21:47.799805 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" exitCode=0 Sep 29 10:21:47 crc kubenswrapper[4779]: I0929 10:21:47.799981 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" 
event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"} Sep 29 10:21:47 crc kubenswrapper[4779]: I0929 10:21:47.803683 4779 scope.go:117] "RemoveContainer" containerID="9da53dcd1003940833ba3cee687f77a5c20c7a240309846ec86bc0f5cfe14e26" Sep 29 10:21:47 crc kubenswrapper[4779]: I0929 10:21:47.803895 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:21:47 crc kubenswrapper[4779]: E0929 10:21:47.804543 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:22:01 crc kubenswrapper[4779]: I0929 10:22:01.714942 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:22:01 crc kubenswrapper[4779]: E0929 10:22:01.716166 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:22:14 crc kubenswrapper[4779]: I0929 10:22:14.715087 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:22:14 crc kubenswrapper[4779]: E0929 10:22:14.716229 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:22:26 crc kubenswrapper[4779]: I0929 10:22:26.715887 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:22:26 crc kubenswrapper[4779]: E0929 10:22:26.717323 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:22:33 crc kubenswrapper[4779]: I0929 10:22:33.282403 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" containerID="76a66c07e14d5a26b0d4ecc205a4210fb4886174b75ce49c34a73fb9c7d19d27" exitCode=0 Sep 29 10:22:33 crc kubenswrapper[4779]: I0929 10:22:33.282538 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" 
event={"ID":"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd","Type":"ContainerDied","Data":"76a66c07e14d5a26b0d4ecc205a4210fb4886174b75ce49c34a73fb9c7d19d27"} Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.786108 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938202 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938276 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-1\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938308 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-1\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938335 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-0\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938420 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2m4s\" (UniqueName: \"kubernetes.io/projected/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-kube-api-access-f2m4s\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938467 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ssh-key\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938493 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph-nova-0\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938572 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-0\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938732 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-custom-ceph-combined-ca-bundle\") pod 
\"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938801 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-extra-config-0\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.938881 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-inventory\") pod \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\" (UID: \"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd\") " Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.946659 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.947700 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-kube-api-access-f2m4s" (OuterVolumeSpecName: "kube-api-access-f2m4s") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "kube-api-access-f2m4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.948461 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph" (OuterVolumeSpecName: "ceph") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.979482 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.983475 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.984042 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.984818 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.988876 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.989171 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:22:34 crc kubenswrapper[4779]: I0929 10:22:34.997434 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.001256 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-inventory" (OuterVolumeSpecName: "inventory") pod "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" (UID: "f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041592 4779 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041639 4779 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041649 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2m4s\" (UniqueName: \"kubernetes.io/projected/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-kube-api-access-f2m4s\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041659 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041669 4779 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041681 4779 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041690 4779 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041701 4779 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041713 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041720 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.041728 4779 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.309493 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" event={"ID":"f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd","Type":"ContainerDied","Data":"bbe9889112363b03c9e9f00b9d38b8fcd874cdfa0eb9530c74456253c51c64a1"} Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.309552 4779 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="bbe9889112363b03c9e9f00b9d38b8fcd874cdfa0eb9530c74456253c51c64a1" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.309585 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.477220 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9"] Sep 29 10:22:35 crc kubenswrapper[4779]: E0929 10:22:35.477821 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.477848 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.478139 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.479162 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.481734 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.482011 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.482278 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.482677 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7nsb" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.484111 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.485688 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.496472 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9"] Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.562807 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceph\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.563429 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc 
kubenswrapper[4779]: I0929 10:22:35.563507 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.563580 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.563652 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.563742 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.563803 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.563828 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffbsm\" (UniqueName: \"kubernetes.io/projected/70c4a839-8e6a-43d0-8204-550b989527e9-kube-api-access-ffbsm\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.666030 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.666110 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.666157 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.666211 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.666246 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.666268 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffbsm\" (UniqueName: \"kubernetes.io/projected/70c4a839-8e6a-43d0-8204-550b989527e9-kube-api-access-ffbsm\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.666329 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceph\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.666363 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.673491 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.673531 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceph\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.673886 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.675058 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.675117 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.676039 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.676447 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.686317 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffbsm\" (UniqueName: \"kubernetes.io/projected/70c4a839-8e6a-43d0-8204-550b989527e9-kube-api-access-ffbsm\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:35 crc kubenswrapper[4779]: I0929 10:22:35.855154 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:22:36 crc kubenswrapper[4779]: I0929 10:22:36.447436 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9"] Sep 29 10:22:36 crc kubenswrapper[4779]: I0929 10:22:36.460388 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 10:22:37 crc kubenswrapper[4779]: I0929 10:22:37.359732 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" event={"ID":"70c4a839-8e6a-43d0-8204-550b989527e9","Type":"ContainerStarted","Data":"71fa402e1f27c6cec6f9d240abf8d19b93bedfee00b0c5c383f995975f68d9a1"} Sep 29 10:22:38 crc kubenswrapper[4779]: I0929 10:22:38.373318 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" event={"ID":"70c4a839-8e6a-43d0-8204-550b989527e9","Type":"ContainerStarted","Data":"9b75bc9bc0815c09fe4e4c7c38614556fb10a7abc5c7cab8d12a2b45bed2d591"} Sep 29 10:22:38 crc kubenswrapper[4779]: I0929 10:22:38.403167 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" podStartSLOduration=2.474755304 podStartE2EDuration="3.403147224s" podCreationTimestamp="2025-09-29 10:22:35 +0000 UTC" firstStartedPulling="2025-09-29 10:22:36.459484777 +0000 UTC m=+3188.440808691" lastFinishedPulling="2025-09-29 10:22:37.387876697 +0000 UTC m=+3189.369200611" observedRunningTime="2025-09-29 10:22:38.398215271 +0000 UTC m=+3190.379539215" watchObservedRunningTime="2025-09-29 10:22:38.403147224 +0000 UTC m=+3190.384471118" Sep 29 10:22:40 crc kubenswrapper[4779]: I0929 10:22:40.715487 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:22:40 crc kubenswrapper[4779]: E0929 10:22:40.716206 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:22:53 crc kubenswrapper[4779]: I0929 10:22:53.715504 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:22:53 crc kubenswrapper[4779]: E0929 10:22:53.716566 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:23:08 crc kubenswrapper[4779]: I0929 10:23:08.725855 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:23:08 crc kubenswrapper[4779]: E0929 10:23:08.727005 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
Sep 29 10:23:19 crc kubenswrapper[4779]: I0929 10:23:19.715830 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"
Sep 29 10:23:19 crc kubenswrapper[4779]: E0929 10:23:19.717026 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:23:31 crc kubenswrapper[4779]: I0929 10:23:31.715119 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"
Sep 29 10:23:31 crc kubenswrapper[4779]: E0929 10:23:31.716127 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:23:45 crc kubenswrapper[4779]: I0929 10:23:45.714631 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"
Sep 29 10:23:45 crc kubenswrapper[4779]: E0929 10:23:45.715500 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:23:57 crc kubenswrapper[4779]: I0929 10:23:57.714570 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"
Sep 29 10:23:57 crc kubenswrapper[4779]: E0929 10:23:57.715649 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:24:10 crc kubenswrapper[4779]: I0929 10:24:10.716064 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"
Sep 29 10:24:10 crc kubenswrapper[4779]: E0929 10:24:10.717254 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:24:23 crc kubenswrapper[4779]: I0929 10:24:23.714727 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:24:23 crc kubenswrapper[4779]: E0929 10:24:23.715602 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:24:38 crc kubenswrapper[4779]: I0929 10:24:38.725435 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:24:38 crc kubenswrapper[4779]: E0929 10:24:38.728278 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:24:50 crc kubenswrapper[4779]: I0929 10:24:50.714542 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:24:50 crc kubenswrapper[4779]: E0929 10:24:50.715849 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:25:05 crc kubenswrapper[4779]: I0929 10:25:05.714306 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:25:05 crc kubenswrapper[4779]: E0929 10:25:05.715252 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:25:09 crc kubenswrapper[4779]: I0929 10:25:09.075931 4779 generic.go:334] "Generic (PLEG): container finished" podID="70c4a839-8e6a-43d0-8204-550b989527e9" containerID="9b75bc9bc0815c09fe4e4c7c38614556fb10a7abc5c7cab8d12a2b45bed2d591" exitCode=0 Sep 29 10:25:09 crc kubenswrapper[4779]: I0929 10:25:09.076032 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" event={"ID":"70c4a839-8e6a-43d0-8204-550b989527e9","Type":"ContainerDied","Data":"9b75bc9bc0815c09fe4e4c7c38614556fb10a7abc5c7cab8d12a2b45bed2d591"} Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.034474 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.108559 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" event={"ID":"70c4a839-8e6a-43d0-8204-550b989527e9","Type":"ContainerDied","Data":"71fa402e1f27c6cec6f9d240abf8d19b93bedfee00b0c5c383f995975f68d9a1"} Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.108615 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71fa402e1f27c6cec6f9d240abf8d19b93bedfee00b0c5c383f995975f68d9a1" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.108678 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.146494 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceph\") pod \"70c4a839-8e6a-43d0-8204-550b989527e9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.146614 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-inventory\") pod \"70c4a839-8e6a-43d0-8204-550b989527e9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.146704 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffbsm\" (UniqueName: \"kubernetes.io/projected/70c4a839-8e6a-43d0-8204-550b989527e9-kube-api-access-ffbsm\") pod \"70c4a839-8e6a-43d0-8204-550b989527e9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.146757 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-1\") pod \"70c4a839-8e6a-43d0-8204-550b989527e9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.146866 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ssh-key\") pod \"70c4a839-8e6a-43d0-8204-550b989527e9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.147058 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-2\") pod \"70c4a839-8e6a-43d0-8204-550b989527e9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.147124 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-0\") pod \"70c4a839-8e6a-43d0-8204-550b989527e9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.147182 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-telemetry-combined-ca-bundle\") pod \"70c4a839-8e6a-43d0-8204-550b989527e9\" (UID: \"70c4a839-8e6a-43d0-8204-550b989527e9\") " Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.167116 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "70c4a839-8e6a-43d0-8204-550b989527e9" (UID: "70c4a839-8e6a-43d0-8204-550b989527e9"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.168652 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70c4a839-8e6a-43d0-8204-550b989527e9-kube-api-access-ffbsm" (OuterVolumeSpecName: "kube-api-access-ffbsm") pod "70c4a839-8e6a-43d0-8204-550b989527e9" (UID: "70c4a839-8e6a-43d0-8204-550b989527e9"). InnerVolumeSpecName "kube-api-access-ffbsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.171140 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceph" (OuterVolumeSpecName: "ceph") pod "70c4a839-8e6a-43d0-8204-550b989527e9" (UID: "70c4a839-8e6a-43d0-8204-550b989527e9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.185014 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-inventory" (OuterVolumeSpecName: "inventory") pod "70c4a839-8e6a-43d0-8204-550b989527e9" (UID: "70c4a839-8e6a-43d0-8204-550b989527e9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.195591 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "70c4a839-8e6a-43d0-8204-550b989527e9" (UID: "70c4a839-8e6a-43d0-8204-550b989527e9"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.206714 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "70c4a839-8e6a-43d0-8204-550b989527e9" (UID: "70c4a839-8e6a-43d0-8204-550b989527e9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.207545 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "70c4a839-8e6a-43d0-8204-550b989527e9" (UID: "70c4a839-8e6a-43d0-8204-550b989527e9"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.210885 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "70c4a839-8e6a-43d0-8204-550b989527e9" (UID: "70c4a839-8e6a-43d0-8204-550b989527e9"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.250396 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.250483 4779 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.250499 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.250517 4779 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.250531 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffbsm\" (UniqueName: \"kubernetes.io/projected/70c4a839-8e6a-43d0-8204-550b989527e9-kube-api-access-ffbsm\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.250543 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.250558 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:11 crc kubenswrapper[4779]: I0929 10:25:11.250571 4779 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/70c4a839-8e6a-43d0-8204-550b989527e9-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:14 crc kubenswrapper[4779]: E0929 10:25:14.848159 4779 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.30:35206->38.102.83.30:34385: write tcp 38.102.83.30:35206->38.102.83.30:34385: write: broken pipe Sep 29 10:25:19 crc kubenswrapper[4779]: I0929 10:25:19.714848 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:25:19 crc kubenswrapper[4779]: E0929 10:25:19.715619 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.200895 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"]
Sep 29 10:25:30 crc kubenswrapper[4779]: E0929 10:25:30.205053 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70c4a839-8e6a-43d0-8204-550b989527e9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.205078 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="70c4a839-8e6a-43d0-8204-550b989527e9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.205372 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="70c4a839-8e6a-43d0-8204-550b989527e9" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.206540 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.208923 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.209698 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.223778 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"]
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.294787 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.294929 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.294956 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-run\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.294998 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2g6c\" (UniqueName: \"kubernetes.io/projected/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-kube-api-access-g2g6c\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295033 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0"
\"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295055 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-scripts\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295092 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-config-data-custom\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295118 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295175 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-sys\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295207 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-nvme\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295241 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-config-data\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295262 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-ceph\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295286 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295320 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295340 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-lib-modules\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.295375 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-dev\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.347596 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.350487 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.354666 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.368695 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.397322 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c72961de-9fe8-4a03-b6df-d12de65986f1-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.397561 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.397635 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.397706 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.398221 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.398304 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-lib-modules\") pod \"cinder-backup-0\" (UID: 
\"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.398390 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403158 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-dev\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403265 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403413 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403481 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-run\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403566 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403631 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-dev\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403703 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2g6c\" (UniqueName: \"kubernetes.io/projected/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-kube-api-access-g2g6c\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403783 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403843 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-scripts\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.403944 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-config-data-custom\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404028 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404096 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzgpk\" (UniqueName: \"kubernetes.io/projected/c72961de-9fe8-4a03-b6df-d12de65986f1-kube-api-access-qzgpk\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404186 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-run\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404244 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404315 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404383 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.398217 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404585 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-dev\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " 
pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.401851 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-lib-modules\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404931 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.404454 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.405292 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-sys\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.405680 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-sys\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.405412 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-run\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.405790 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-nvme\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.405870 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.405989 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-nvme\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.406014 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-config-data\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " 
pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.406121 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.406191 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-ceph\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.406265 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.406346 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.406550 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.405373 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.405431 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.431548 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-scripts\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.432245 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-config-data\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.432410 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-config-data-custom\") pod \"cinder-backup-0\" (UID: 
\"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.432787 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-ceph\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.436538 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2g6c\" (UniqueName: \"kubernetes.io/projected/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-kube-api-access-g2g6c\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.437440 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2\") " pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.507783 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzgpk\" (UniqueName: \"kubernetes.io/projected/c72961de-9fe8-4a03-b6df-d12de65986f1-kube-api-access-qzgpk\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.508296 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-run\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.508477 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.509392 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.509498 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.509604 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.509734 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.509844 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.508435 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-run\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510030 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510133 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c72961de-9fe8-4a03-b6df-d12de65986f1-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510250 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510339 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510426 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510548 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510799 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.511097 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-dev\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.511226 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.512025 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510164 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.512147 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.510197 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.512199 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.512230 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.512791 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c72961de-9fe8-4a03-b6df-d12de65986f1-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0"
\"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.524724 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c72961de-9fe8-4a03-b6df-d12de65986f1-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.533811 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.534618 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.540557 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.546844 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.546849 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzgpk\" (UniqueName: \"kubernetes.io/projected/c72961de-9fe8-4a03-b6df-d12de65986f1-kube-api-access-qzgpk\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.557665 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c72961de-9fe8-4a03-b6df-d12de65986f1-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"c72961de-9fe8-4a03-b6df-d12de65986f1\") " pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.691588 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.759266 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume2-0"] Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.778731 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume2-0"] Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.778838 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.782936 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume2-config-data" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967284 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-combined-ca-bundle\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967634 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-nvme\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967675 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/79feadef-f3f0-4d05-94ce-9edadb69bb6e-ceph\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967690 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-dev\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967735 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-lib-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967757 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-scripts\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967777 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-locks-brick\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967821 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-lib-modules\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967852 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: 
\"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-locks-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.967882 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-run\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.968554 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-config-data\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.968587 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-sys\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.968721 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmfqr\" (UniqueName: \"kubernetes.io/projected/79feadef-f3f0-4d05-94ce-9edadb69bb6e-kube-api-access-kmfqr\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.968745 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-config-data-custom\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.968799 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-iscsi\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.968837 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-machine-id\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.980489 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-8658bb8f6f-ltnkm"] Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.982407 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.987754 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.993580 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.993714 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-6nhqt" Sep 29 10:25:30 crc kubenswrapper[4779]: I0929 10:25:30.993776 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.002604 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8658bb8f6f-ltnkm"] Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070511 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-locks-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070596 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-run\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070653 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-config-data\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070681 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-sys\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070745 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmfqr\" (UniqueName: \"kubernetes.io/projected/79feadef-f3f0-4d05-94ce-9edadb69bb6e-kube-api-access-kmfqr\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070768 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-config-data-custom\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070798 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-iscsi\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 
10:25:31.070839 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-machine-id\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070877 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-combined-ca-bundle\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070930 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-nvme\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.070980 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-dev\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.071002 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/79feadef-f3f0-4d05-94ce-9edadb69bb6e-ceph\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.071061 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-lib-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.071107 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-scripts\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.071141 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-locks-brick\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.071211 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-lib-modules\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.071351 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-lib-modules\") pod 
\"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.071447 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-locks-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.071489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-run\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.101493 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-sys\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.101753 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-iscsi\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.101796 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-nvme\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.101821 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-etc-machine-id\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.101856 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-locks-brick\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.101879 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-dev\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.101967 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/79feadef-f3f0-4d05-94ce-9edadb69bb6e-var-lib-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.108094 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-combined-ca-bundle\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.113104 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-scripts\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.114720 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-config-data\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.125005 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-675f5c7fcc-8k9c8"] Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.126991 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.128272 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/79feadef-f3f0-4d05-94ce-9edadb69bb6e-ceph\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.161938 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79feadef-f3f0-4d05-94ce-9edadb69bb6e-config-data-custom\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.165311 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.175998 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.182848 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.183236 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.183429 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rkfkk" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.184278 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.190632 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-logs\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.191138 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-horizon-secret-key\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.191318 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-scripts\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.191532 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp947\" (UniqueName: \"kubernetes.io/projected/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-kube-api-access-wp947\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.191771 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-config-data\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.196214 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmfqr\" (UniqueName: \"kubernetes.io/projected/79feadef-f3f0-4d05-94ce-9edadb69bb6e-kube-api-access-kmfqr\") pod \"cinder-volume-volume2-0\" (UID: \"79feadef-f3f0-4d05-94ce-9edadb69bb6e\") " pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.210302 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-675f5c7fcc-8k9c8"] Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.247282 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294022 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-config-data\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294097 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294143 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-ceph\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294194 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27d399f1-4a87-47f3-8b0c-6a28960f18aa-horizon-secret-key\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294227 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294256 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-logs\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294316 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-horizon-secret-key\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294388 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-config-data\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294458 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-scripts\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294482 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294510 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294537 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-logs\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294588 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w4kn\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-kube-api-access-9w4kn\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294627 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294653 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d399f1-4a87-47f3-8b0c-6a28960f18aa-logs\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294693 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-scripts\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294726 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294755 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp947\" (UniqueName: \"kubernetes.io/projected/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-kube-api-access-wp947\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.294836 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkzrb\" (UniqueName: \"kubernetes.io/projected/27d399f1-4a87-47f3-8b0c-6a28960f18aa-kube-api-access-nkzrb\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.296276 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-logs\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.296598 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.296874 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-config-data\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.300937 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-horizon-secret-key\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.305477 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.308147 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-scripts\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.312053 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.316844 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.333787 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.334444 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp947\" (UniqueName: \"kubernetes.io/projected/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-kube-api-access-wp947\") pod \"horizon-8658bb8f6f-ltnkm\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:31 crc kubenswrapper[4779]: E0929 10:25:31.334467 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceph combined-ca-bundle config-data glance httpd-run internal-tls-certs kube-api-access-nsts4 logs scripts], unattached volumes=[], failed to process volumes=[ceph combined-ca-bundle config-data glance httpd-run internal-tls-certs kube-api-access-nsts4 logs scripts]: context canceled" pod="openstack/glance-default-internal-api-0" podUID="677f5eb5-79ad-4366-8623-cbb385372d65" 
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.335310 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8658bb8f6f-ltnkm"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.364451 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.369344 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.392748 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.398938 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-logs\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399071 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399170 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399249 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-config-data\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399293 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399350 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399380 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399402 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-logs\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399435 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w4kn\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-kube-api-access-9w4kn\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399465 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399486 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d399f1-4a87-47f3-8b0c-6a28960f18aa-logs\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399513 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-scripts\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399536 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399562 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-scripts\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399605 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkzrb\" (UniqueName: \"kubernetes.io/projected/27d399f1-4a87-47f3-8b0c-6a28960f18aa-kube-api-access-nkzrb\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399625 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsts4\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-kube-api-access-nsts4\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399653 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399678 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-ceph\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399698 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-ceph\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399725 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-config-data\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399744 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399766 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27d399f1-4a87-47f3-8b0c-6a28960f18aa-horizon-secret-key\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.399789 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.400856 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.402974 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-scripts\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.403297 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-logs\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.404455 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.404689 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.405585 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d399f1-4a87-47f3-8b0c-6a28960f18aa-logs\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.400937 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-config-data\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.409050 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-ceph\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.412731 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.413120 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27d399f1-4a87-47f3-8b0c-6a28960f18aa-horizon-secret-key\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.421619 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.422167 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume2-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.432211 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkzrb\" (UniqueName: \"kubernetes.io/projected/27d399f1-4a87-47f3-8b0c-6a28960f18aa-kube-api-access-nkzrb\") pod \"horizon-675f5c7fcc-8k9c8\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.423246 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.447386 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w4kn\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-kube-api-access-9w4kn\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.485794 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.494325 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-675f5c7fcc-8k9c8"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501623 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501737 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-scripts\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501777 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsts4\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-kube-api-access-nsts4\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501819 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-ceph\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501846 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-config-data\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501864 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501944 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-logs\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501963 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.501986 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.507305 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-ceph\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.507822 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.522416 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-logs\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.526448 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.530573 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.531170 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.534825 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.536118 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-scripts\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.546560 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-config-data\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.557954 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsts4\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-kube-api-access-nsts4\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.602868 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-combined-ca-bundle\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.603044 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-scripts\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.603079 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsts4\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-kube-api-access-nsts4\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.603142 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-internal-tls-certs\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.603202 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-ceph\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.603284 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-config-data\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.603344 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-logs\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.603376 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-httpd-run\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.604215 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.618064 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-logs" (OuterVolumeSpecName: "logs") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.621561 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-config-data" (OuterVolumeSpecName: "config-data") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.625422 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.625524 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.625678 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-ceph" (OuterVolumeSpecName: "ceph") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.631783 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") " pod="openstack/glance-default-internal-api-0"
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.644169 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-kube-api-access-nsts4" (OuterVolumeSpecName: "kube-api-access-nsts4") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "kube-api-access-nsts4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.648263 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-scripts" (OuterVolumeSpecName: "scripts") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.653533 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"]
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.705764 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"677f5eb5-79ad-4366-8623-cbb385372d65\" (UID: \"677f5eb5-79ad-4366-8623-cbb385372d65\") "
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.706463 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.706487 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-logs\") on node \"crc\" DevicePath \"\""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.706495 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/677f5eb5-79ad-4366-8623-cbb385372d65-httpd-run\") on node \"crc\" DevicePath \"\""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.706505 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.706514 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.706523 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsts4\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-kube-api-access-nsts4\") on node \"crc\" DevicePath \"\""
Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.706531 4779 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/677f5eb5-79ad-4366-8623-cbb385372d65-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.706539 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/677f5eb5-79ad-4366-8623-cbb385372d65-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.716957 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "677f5eb5-79ad-4366-8623-cbb385372d65" (UID: "677f5eb5-79ad-4366-8623-cbb385372d65"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 10:25:31 crc kubenswrapper[4779]: W0929 10:25:31.780964 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc72961de_9fe8_4a03_b6df_d12de65986f1.slice/crio-645549a34bb36bd3fefdd266bfe3006298e5b6b9c6f56d0c6689aa3ab4d8e955 WatchSource:0}: Error finding container 645549a34bb36bd3fefdd266bfe3006298e5b6b9c6f56d0c6689aa3ab4d8e955: Status 404 returned error can't find the container with id 645549a34bb36bd3fefdd266bfe3006298e5b6b9c6f56d0c6689aa3ab4d8e955 Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.790900 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.809144 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.836797 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.911082 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:31 crc kubenswrapper[4779]: I0929 10:25:31.931536 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8658bb8f6f-ltnkm"] Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.188286 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-675f5c7fcc-8k9c8"] Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.293229 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.380881 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-675f5c7fcc-8k9c8" event={"ID":"27d399f1-4a87-47f3-8b0c-6a28960f18aa","Type":"ContainerStarted","Data":"8cfc15092a133a3017db09e6b305d86de2463a4a92a0a9e9b868fd91f1f17011"} Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.385300 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c72961de-9fe8-4a03-b6df-d12de65986f1","Type":"ContainerStarted","Data":"645549a34bb36bd3fefdd266bfe3006298e5b6b9c6f56d0c6689aa3ab4d8e955"} Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.388312 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8658bb8f6f-ltnkm" 
event={"ID":"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7","Type":"ContainerStarted","Data":"c2963ceef4cf641bd72fcc634360728b3c5e2ca0f357cb26e376aa89e08c9c41"} Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.389359 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2","Type":"ContainerStarted","Data":"e39406f8282c592727ce8ada1793a9919b9f9ecd0f41bb195987412aa6cbf76b"} Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.389408 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.484482 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.525482 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.554530 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.559670 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.565299 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.565347 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.577003 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.635213 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-logs\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.635329 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.635415 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.635460 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.635660 4779 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v7h6\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-kube-api-access-8v7h6\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.636034 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.636102 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.636220 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.636263 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.715634 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:25:32 crc kubenswrapper[4779]: E0929 10:25:32.716044 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.732252 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="677f5eb5-79ad-4366-8623-cbb385372d65" path="/var/lib/kubelet/pods/677f5eb5-79ad-4366-8623-cbb385372d65/volumes" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.741915 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.741975 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.742024 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.742048 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.742105 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-logs\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.742189 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.742269 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.742420 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.742488 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v7h6\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-kube-api-access-8v7h6\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.742778 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.743033 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 
29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.757822 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-logs\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.837669 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.837801 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.838555 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.839072 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v7h6\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-kube-api-access-8v7h6\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.839433 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.843549 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.924854 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume2-0"] Sep 29 10:25:32 crc kubenswrapper[4779]: I0929 10:25:32.953335 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:32 crc kubenswrapper[4779]: W0929 10:25:32.964763 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79feadef_f3f0_4d05_94ce_9edadb69bb6e.slice/crio-376174cc29d578863de07951414d0f7a6b63a328371b06ba5e414402390e6dbe WatchSource:0}: Error finding container 
376174cc29d578863de07951414d0f7a6b63a328371b06ba5e414402390e6dbe: Status 404 returned error can't find the container with id 376174cc29d578863de07951414d0f7a6b63a328371b06ba5e414402390e6dbe Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.064603 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.405804 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2","Type":"ContainerStarted","Data":"33786b7e9839c55345a628d177f2834b4e991c537a89261eb503c04c991de2bd"} Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.408618 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume2-0" event={"ID":"79feadef-f3f0-4d05-94ce-9edadb69bb6e","Type":"ContainerStarted","Data":"376174cc29d578863de07951414d0f7a6b63a328371b06ba5e414402390e6dbe"} Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.411824 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c72961de-9fe8-4a03-b6df-d12de65986f1","Type":"ContainerStarted","Data":"e7f38f10386061ef1124e560bbcde84d48709323e2ac0f93cba58049c7db3b43"} Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.420491 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23901a8a-449b-4852-8b75-c1615c0501f7","Type":"ContainerStarted","Data":"6303e33bb3e15415a4603ec578b5b499298eff107810fd9def551e355e8ec4af"} Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.662071 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-675f5c7fcc-8k9c8"] Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.740656 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-54748d8d4-lx86m"] Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.743630 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.758937 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.764024 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.785976 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-54748d8d4-lx86m"] Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.794474 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-secret-key\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.794709 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17e72d33-8852-486a-8973-fe22a52f6e00-logs\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.795046 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-combined-ca-bundle\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.795218 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-config-data\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.795303 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q765s\" (UniqueName: \"kubernetes.io/projected/17e72d33-8852-486a-8973-fe22a52f6e00-kube-api-access-q765s\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.795386 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-scripts\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.795499 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-tls-certs\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.803171 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:33 crc kubenswrapper[4779]: W0929 10:25:33.857273 
4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a29c423_dcec_471e_9dd0_f1032e3f2dc4.slice/crio-fe02c124ae04a13f6b5fd6cdb03aeb966420561306796841dc449ac611d3e562 WatchSource:0}: Error finding container fe02c124ae04a13f6b5fd6cdb03aeb966420561306796841dc449ac611d3e562: Status 404 returned error can't find the container with id fe02c124ae04a13f6b5fd6cdb03aeb966420561306796841dc449ac611d3e562 Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.875493 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8658bb8f6f-ltnkm"] Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.897735 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-config-data\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.897777 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q765s\" (UniqueName: \"kubernetes.io/projected/17e72d33-8852-486a-8973-fe22a52f6e00-kube-api-access-q765s\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.897803 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-scripts\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.897841 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-tls-certs\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.897874 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-secret-key\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.897934 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17e72d33-8852-486a-8973-fe22a52f6e00-logs\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.897993 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-combined-ca-bundle\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.899683 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-scripts\") pod \"horizon-54748d8d4-lx86m\" (UID: 
\"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.901058 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-config-data\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.906697 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17e72d33-8852-486a-8973-fe22a52f6e00-logs\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.913953 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-tls-certs\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.920974 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7d46749f98-lswks"] Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.930960 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-secret-key\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.931694 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.931787 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-combined-ca-bundle\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.952602 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7d46749f98-lswks"] Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.968731 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:33 crc kubenswrapper[4779]: I0929 10:25:33.980851 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q765s\" (UniqueName: \"kubernetes.io/projected/17e72d33-8852-486a-8973-fe22a52f6e00-kube-api-access-q765s\") pod \"horizon-54748d8d4-lx86m\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") " pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.106362 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.117376 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-combined-ca-bundle\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.117434 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spfg4\" (UniqueName: \"kubernetes.io/projected/4e4ac544-73b5-4ec4-975d-83eac168a331-kube-api-access-spfg4\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.117489 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4ac544-73b5-4ec4-975d-83eac168a331-logs\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.117595 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e4ac544-73b5-4ec4-975d-83eac168a331-config-data\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.117636 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-horizon-tls-certs\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.117703 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-horizon-secret-key\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.117724 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e4ac544-73b5-4ec4-975d-83eac168a331-scripts\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.220146 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e4ac544-73b5-4ec4-975d-83eac168a331-config-data\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.220205 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-horizon-tls-certs\") pod \"horizon-7d46749f98-lswks\" (UID: 
\"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.220278 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-horizon-secret-key\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.220307 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e4ac544-73b5-4ec4-975d-83eac168a331-scripts\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.220354 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-combined-ca-bundle\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.221531 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e4ac544-73b5-4ec4-975d-83eac168a331-config-data\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.225336 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-horizon-tls-certs\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.231461 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e4ac544-73b5-4ec4-975d-83eac168a331-scripts\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.232623 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spfg4\" (UniqueName: \"kubernetes.io/projected/4e4ac544-73b5-4ec4-975d-83eac168a331-kube-api-access-spfg4\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.232788 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4ac544-73b5-4ec4-975d-83eac168a331-logs\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.233441 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4ac544-73b5-4ec4-975d-83eac168a331-logs\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.234878 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-horizon-secret-key\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.237974 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4ac544-73b5-4ec4-975d-83eac168a331-combined-ca-bundle\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.272279 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spfg4\" (UniqueName: \"kubernetes.io/projected/4e4ac544-73b5-4ec4-975d-83eac168a331-kube-api-access-spfg4\") pod \"horizon-7d46749f98-lswks\" (UID: \"4e4ac544-73b5-4ec4-975d-83eac168a331\") " pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.486365 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.490712 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a29c423-dcec-471e-9dd0-f1032e3f2dc4","Type":"ContainerStarted","Data":"fe02c124ae04a13f6b5fd6cdb03aeb966420561306796841dc449ac611d3e562"} Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.514732 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23901a8a-449b-4852-8b75-c1615c0501f7","Type":"ContainerStarted","Data":"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716"} Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.567538 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2","Type":"ContainerStarted","Data":"05ed675fac49e3a0ceb3c82d20513c0c7a3ce069fe5e01bf43a43997c7731e06"} Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.619607 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume2-0" event={"ID":"79feadef-f3f0-4d05-94ce-9edadb69bb6e","Type":"ContainerStarted","Data":"6cd1cb8e52d03c736ba36d2c643b041b78d1c7a05240c2a50d7f0871921d2c9f"} Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.620155 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume2-0" event={"ID":"79feadef-f3f0-4d05-94ce-9edadb69bb6e","Type":"ContainerStarted","Data":"7670eeccbac13367f6ae9f23a0dfb95af9c425f1c33460a305ff4fd5f71c12ad"} Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.662394 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.91232814 podStartE2EDuration="4.662365313s" podCreationTimestamp="2025-09-29 10:25:30 +0000 UTC" firstStartedPulling="2025-09-29 10:25:31.65566126 +0000 UTC m=+3363.636985164" lastFinishedPulling="2025-09-29 10:25:32.405698443 +0000 UTC m=+3364.387022337" observedRunningTime="2025-09-29 10:25:34.647270684 +0000 UTC m=+3366.628594588" watchObservedRunningTime="2025-09-29 10:25:34.662365313 +0000 UTC m=+3366.643689217" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.677853 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume2-0" 
podStartSLOduration=4.677829463 podStartE2EDuration="4.677829463s" podCreationTimestamp="2025-09-29 10:25:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 10:25:34.675467994 +0000 UTC m=+3366.656792348" watchObservedRunningTime="2025-09-29 10:25:34.677829463 +0000 UTC m=+3366.659153367" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.688263 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c72961de-9fe8-4a03-b6df-d12de65986f1","Type":"ContainerStarted","Data":"c6029bc4a940d5a7a99b52fa7fa89e16416af5faacd45e6b654e4984ae5c9f7b"} Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.744340 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.993315456 podStartE2EDuration="4.742524615s" podCreationTimestamp="2025-09-29 10:25:30 +0000 UTC" firstStartedPulling="2025-09-29 10:25:31.783969402 +0000 UTC m=+3363.765293306" lastFinishedPulling="2025-09-29 10:25:32.533178561 +0000 UTC m=+3364.514502465" observedRunningTime="2025-09-29 10:25:34.731828473 +0000 UTC m=+3366.713152377" watchObservedRunningTime="2025-09-29 10:25:34.742524615 +0000 UTC m=+3366.723848529" Sep 29 10:25:34 crc kubenswrapper[4779]: I0929 10:25:34.793596 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-54748d8d4-lx86m"] Sep 29 10:25:35 crc kubenswrapper[4779]: I0929 10:25:35.452235 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7d46749f98-lswks"] Sep 29 10:25:35 crc kubenswrapper[4779]: I0929 10:25:35.536209 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Sep 29 10:25:35 crc kubenswrapper[4779]: I0929 10:25:35.691870 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:35 crc kubenswrapper[4779]: I0929 10:25:35.732514 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54748d8d4-lx86m" event={"ID":"17e72d33-8852-486a-8973-fe22a52f6e00","Type":"ContainerStarted","Data":"f55d14e01eab69dba2c5e4bddaf46172efad3a116a26c0368733c9bbbcff6cbd"} Sep 29 10:25:35 crc kubenswrapper[4779]: I0929 10:25:35.742757 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d46749f98-lswks" event={"ID":"4e4ac544-73b5-4ec4-975d-83eac168a331","Type":"ContainerStarted","Data":"06754258f8c18dc2bee5ee152252f45dcd10caa9dc97550f00b65928e922fae4"} Sep 29 10:25:36 crc kubenswrapper[4779]: I0929 10:25:36.423594 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:36 crc kubenswrapper[4779]: I0929 10:25:36.784214 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a29c423-dcec-471e-9dd0-f1032e3f2dc4","Type":"ContainerStarted","Data":"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892"} Sep 29 10:25:36 crc kubenswrapper[4779]: I0929 10:25:36.792125 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" containerName="glance-log" containerID="cri-o://ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716" gracePeriod=30 Sep 29 10:25:36 crc kubenswrapper[4779]: I0929 10:25:36.792541 4779 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23901a8a-449b-4852-8b75-c1615c0501f7","Type":"ContainerStarted","Data":"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad"} Sep 29 10:25:36 crc kubenswrapper[4779]: I0929 10:25:36.794838 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" containerName="glance-httpd" containerID="cri-o://f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad" gracePeriod=30 Sep 29 10:25:36 crc kubenswrapper[4779]: I0929 10:25:36.839094 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.839066818 podStartE2EDuration="5.839066818s" podCreationTimestamp="2025-09-29 10:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 10:25:36.831636511 +0000 UTC m=+3368.812960425" watchObservedRunningTime="2025-09-29 10:25:36.839066818 +0000 UTC m=+3368.820390722" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.654411 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.699769 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-logs\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.699847 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-config-data\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.700806 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w4kn\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-kube-api-access-9w4kn\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.700921 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-scripts\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.700984 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-public-tls-certs\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.701065 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-combined-ca-bundle\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.701124 4779 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-httpd-run\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.701225 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-ceph\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.701264 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"23901a8a-449b-4852-8b75-c1615c0501f7\" (UID: \"23901a8a-449b-4852-8b75-c1615c0501f7\") " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.702628 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-logs" (OuterVolumeSpecName: "logs") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.703957 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.709189 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-scripts" (OuterVolumeSpecName: "scripts") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.710372 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-kube-api-access-9w4kn" (OuterVolumeSpecName: "kube-api-access-9w4kn") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "kube-api-access-9w4kn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.716355 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-ceph" (OuterVolumeSpecName: "ceph") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.737263 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "local-storage06-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.749385 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.800708 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-config-data" (OuterVolumeSpecName: "config-data") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.805054 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w4kn\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-kube-api-access-9w4kn\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.805101 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.805113 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.805125 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.805137 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/23901a8a-449b-4852-8b75-c1615c0501f7-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.805163 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.805178 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23901a8a-449b-4852-8b75-c1615c0501f7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.805191 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.820427 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "23901a8a-449b-4852-8b75-c1615c0501f7" (UID: "23901a8a-449b-4852-8b75-c1615c0501f7"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.855156 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.890462 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a29c423-dcec-471e-9dd0-f1032e3f2dc4","Type":"ContainerStarted","Data":"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef"} Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.890938 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerName="glance-log" containerID="cri-o://9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892" gracePeriod=30 Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.891390 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerName="glance-httpd" containerID="cri-o://927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef" gracePeriod=30 Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.901736 4779 generic.go:334] "Generic (PLEG): container finished" podID="23901a8a-449b-4852-8b75-c1615c0501f7" containerID="f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad" exitCode=0 Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.901769 4779 generic.go:334] "Generic (PLEG): container finished" podID="23901a8a-449b-4852-8b75-c1615c0501f7" containerID="ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716" exitCode=143 Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.901798 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23901a8a-449b-4852-8b75-c1615c0501f7","Type":"ContainerDied","Data":"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad"} Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.901840 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23901a8a-449b-4852-8b75-c1615c0501f7","Type":"ContainerDied","Data":"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716"} Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.901860 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23901a8a-449b-4852-8b75-c1615c0501f7","Type":"ContainerDied","Data":"6303e33bb3e15415a4603ec578b5b499298eff107810fd9def551e355e8ec4af"} Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.901959 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.902028 4779 scope.go:117] "RemoveContainer" containerID="f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.930822 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.930893 4779 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23901a8a-449b-4852-8b75-c1615c0501f7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:37 crc kubenswrapper[4779]: I0929 10:25:37.950113 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.950079299 podStartE2EDuration="5.950079299s" podCreationTimestamp="2025-09-29 10:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 10:25:37.933828677 +0000 UTC m=+3369.915152581" watchObservedRunningTime="2025-09-29 10:25:37.950079299 +0000 UTC m=+3369.931403203" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.031966 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.039380 4779 scope.go:117] "RemoveContainer" containerID="ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.057528 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.070976 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:38 crc kubenswrapper[4779]: E0929 10:25:38.071813 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" containerName="glance-log" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.071841 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" containerName="glance-log" Sep 29 10:25:38 crc kubenswrapper[4779]: E0929 10:25:38.071870 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" containerName="glance-httpd" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.071878 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" containerName="glance-httpd" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.072177 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" containerName="glance-httpd" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.072220 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" containerName="glance-log" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.074573 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.079020 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.079039 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.094178 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.114843 4779 scope.go:117] "RemoveContainer" containerID="f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad" Sep 29 10:25:38 crc kubenswrapper[4779]: E0929 10:25:38.115646 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad\": container with ID starting with f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad not found: ID does not exist" containerID="f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.115736 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad"} err="failed to get container status \"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad\": rpc error: code = NotFound desc = could not find container \"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad\": container with ID starting with f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad not found: ID does not exist" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.115821 4779 scope.go:117] "RemoveContainer" containerID="ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716" Sep 29 10:25:38 crc kubenswrapper[4779]: E0929 10:25:38.116613 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716\": container with ID starting with ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716 not found: ID does not exist" containerID="ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.116653 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716"} err="failed to get container status \"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716\": rpc error: code = NotFound desc = could not find container \"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716\": container with ID starting with ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716 not found: ID does not exist" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.116675 4779 scope.go:117] "RemoveContainer" containerID="f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.120206 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad"} err="failed to get container status 
\"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad\": rpc error: code = NotFound desc = could not find container \"f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad\": container with ID starting with f0b83822aaf75f66b2be78c1e305f0939e50790d0a9a7be2b8dcfd9188e70fad not found: ID does not exist" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.120267 4779 scope.go:117] "RemoveContainer" containerID="ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.121533 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716"} err="failed to get container status \"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716\": rpc error: code = NotFound desc = could not find container \"ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716\": container with ID starting with ce6bf8934c3dcdfea64f850eb2ecc6fddbf647a1de2a55198e415dc8600e5716 not found: ID does not exist" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135242 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135303 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135348 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-logs\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135395 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135439 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135460 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135513 4779 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-ceph\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135619 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.135665 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54vxj\" (UniqueName: \"kubernetes.io/projected/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-kube-api-access-54vxj\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237200 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237279 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54vxj\" (UniqueName: \"kubernetes.io/projected/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-kube-api-access-54vxj\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237361 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237433 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237464 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-logs\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237511 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237565 4779 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237598 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237704 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-ceph\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.237996 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.238837 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-logs\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.239071 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.247372 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-ceph\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.250273 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.250403 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.254536 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.254512 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.260388 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54vxj\" (UniqueName: \"kubernetes.io/projected/ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78-kube-api-access-54vxj\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.282745 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78\") " pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.421102 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.747341 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23901a8a-449b-4852-8b75-c1615c0501f7" path="/var/lib/kubelet/pods/23901a8a-449b-4852-8b75-c1615c0501f7/volumes" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.846598 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.959785 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.960350 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-httpd-run\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.960390 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-scripts\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.960504 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v7h6\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-kube-api-access-8v7h6\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.960535 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-combined-ca-bundle\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.960564 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-internal-tls-certs\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.960612 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-ceph\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.960718 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-config-data\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.960793 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-logs\") pod \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\" (UID: \"5a29c423-dcec-471e-9dd0-f1032e3f2dc4\") " Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.961711 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-logs" (OuterVolumeSpecName: "logs") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.972977 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.983126 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.983545 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-ceph" (OuterVolumeSpecName: "ceph") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.986548 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-scripts" (OuterVolumeSpecName: "scripts") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:38 crc kubenswrapper[4779]: I0929 10:25:38.987290 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-kube-api-access-8v7h6" (OuterVolumeSpecName: "kube-api-access-8v7h6") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "kube-api-access-8v7h6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.006382 4779 generic.go:334] "Generic (PLEG): container finished" podID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerID="927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef" exitCode=0 Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.006430 4779 generic.go:334] "Generic (PLEG): container finished" podID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerID="9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892" exitCode=143 Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.006460 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a29c423-dcec-471e-9dd0-f1032e3f2dc4","Type":"ContainerDied","Data":"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef"} Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.006495 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a29c423-dcec-471e-9dd0-f1032e3f2dc4","Type":"ContainerDied","Data":"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892"} Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.006510 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a29c423-dcec-471e-9dd0-f1032e3f2dc4","Type":"ContainerDied","Data":"fe02c124ae04a13f6b5fd6cdb03aeb966420561306796841dc449ac611d3e562"} Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.006534 4779 scope.go:117] "RemoveContainer" containerID="927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.006755 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.065631 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-logs\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.065691 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.065705 4779 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.065716 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.065733 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v7h6\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-kube-api-access-8v7h6\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.065748 4779 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.107970 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-config-data" (OuterVolumeSpecName: "config-data") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.108637 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.125111 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a29c423-dcec-471e-9dd0-f1032e3f2dc4" (UID: "5a29c423-dcec-471e-9dd0-f1032e3f2dc4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.139087 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.167891 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.175130 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.175148 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.175160 4779 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a29c423-dcec-471e-9dd0-f1032e3f2dc4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.221177 4779 scope.go:117] "RemoveContainer" containerID="9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.270644 4779 scope.go:117] "RemoveContainer" containerID="927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef" Sep 29 10:25:39 crc kubenswrapper[4779]: E0929 10:25:39.273137 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef\": container with ID starting with 927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef not found: ID does not exist" containerID="927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.273186 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef"} err="failed to get container status \"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef\": rpc error: code = NotFound desc = could not find container \"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef\": container with ID starting with 927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef not found: ID does not exist" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.273221 4779 scope.go:117] "RemoveContainer" containerID="9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892" Sep 29 10:25:39 crc kubenswrapper[4779]: E0929 10:25:39.273892 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892\": container with ID starting with 9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892 not found: ID does not exist" containerID="9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.273977 4779 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892"} err="failed to get container status \"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892\": rpc error: code = NotFound desc = could not find container \"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892\": container with ID starting with 9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892 not found: ID does not exist" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.274045 4779 scope.go:117] "RemoveContainer" containerID="927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.274589 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef"} err="failed to get container status \"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef\": rpc error: code = NotFound desc = could not find container \"927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef\": container with ID starting with 927dda149384c781c5cc803918872cc90854303ed034144420c3f911d20405ef not found: ID does not exist" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.274644 4779 scope.go:117] "RemoveContainer" containerID="9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.275058 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892"} err="failed to get container status \"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892\": rpc error: code = NotFound desc = could not find container \"9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892\": container with ID starting with 9572b12ed51f74fee82d5aea82d5e13999cd12528937e8174f1c798227b43892 not found: ID does not exist" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.377211 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.399859 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:39 crc kubenswrapper[4779]: W0929 10:25:39.401421 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae44fb6f_4ba0_4e8c_801e_ce14e3c69a78.slice/crio-09aa6abfc872155ae7d3f06037310f7b0ea6c89b21323cc48b011112bd0e34a6 WatchSource:0}: Error finding container 09aa6abfc872155ae7d3f06037310f7b0ea6c89b21323cc48b011112bd0e34a6: Status 404 returned error can't find the container with id 09aa6abfc872155ae7d3f06037310f7b0ea6c89b21323cc48b011112bd0e34a6 Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.419600 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.435607 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:39 crc kubenswrapper[4779]: E0929 10:25:39.436329 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerName="glance-httpd" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.436348 4779 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerName="glance-httpd" Sep 29 10:25:39 crc kubenswrapper[4779]: E0929 10:25:39.436369 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerName="glance-log" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.436379 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerName="glance-log" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.436596 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerName="glance-log" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.436615 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" containerName="glance-httpd" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.439100 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.444561 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.444872 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.449197 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489244 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489302 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489361 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/04247105-501f-4d3a-b624-7e1d64014fe8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489407 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489527 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-868w2\" (UniqueName: \"kubernetes.io/projected/04247105-501f-4d3a-b624-7e1d64014fe8-kube-api-access-868w2\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489567 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489597 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04247105-501f-4d3a-b624-7e1d64014fe8-logs\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489629 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.489712 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/04247105-501f-4d3a-b624-7e1d64014fe8-ceph\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593196 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/04247105-501f-4d3a-b624-7e1d64014fe8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593275 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593395 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-868w2\" (UniqueName: \"kubernetes.io/projected/04247105-501f-4d3a-b624-7e1d64014fe8-kube-api-access-868w2\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593427 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593458 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04247105-501f-4d3a-b624-7e1d64014fe8-logs\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593488 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593557 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/04247105-501f-4d3a-b624-7e1d64014fe8-ceph\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593610 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593640 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593769 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/04247105-501f-4d3a-b624-7e1d64014fe8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.593985 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.594090 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04247105-501f-4d3a-b624-7e1d64014fe8-logs\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.605556 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.616282 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/04247105-501f-4d3a-b624-7e1d64014fe8-ceph\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.620517 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.624396 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-868w2\" (UniqueName: \"kubernetes.io/projected/04247105-501f-4d3a-b624-7e1d64014fe8-kube-api-access-868w2\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.637080 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.658440 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04247105-501f-4d3a-b624-7e1d64014fe8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.685893 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"04247105-501f-4d3a-b624-7e1d64014fe8\") " pod="openstack/glance-default-internal-api-0" Sep 29 10:25:39 crc kubenswrapper[4779]: I0929 10:25:39.811307 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:40 crc kubenswrapper[4779]: I0929 10:25:40.028222 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78","Type":"ContainerStarted","Data":"09aa6abfc872155ae7d3f06037310f7b0ea6c89b21323cc48b011112bd0e34a6"} Sep 29 10:25:40 crc kubenswrapper[4779]: I0929 10:25:40.306699 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 10:25:40 crc kubenswrapper[4779]: W0929 10:25:40.354848 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04247105_501f_4d3a_b624_7e1d64014fe8.slice/crio-c5e3ed059b1412b7232b9f83d5c58b600e131acd5f5f80aa1a349b2a8c7aa8b6 WatchSource:0}: Error finding container c5e3ed059b1412b7232b9f83d5c58b600e131acd5f5f80aa1a349b2a8c7aa8b6: Status 404 returned error can't find the container with id c5e3ed059b1412b7232b9f83d5c58b600e131acd5f5f80aa1a349b2a8c7aa8b6 Sep 29 10:25:40 crc kubenswrapper[4779]: I0929 10:25:40.746057 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a29c423-dcec-471e-9dd0-f1032e3f2dc4" path="/var/lib/kubelet/pods/5a29c423-dcec-471e-9dd0-f1032e3f2dc4/volumes" Sep 29 10:25:40 crc kubenswrapper[4779]: I0929 10:25:40.786434 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Sep 29 10:25:40 crc kubenswrapper[4779]: I0929 10:25:40.909554 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Sep 29 10:25:41 crc kubenswrapper[4779]: I0929 10:25:41.048891 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78","Type":"ContainerStarted","Data":"bf6e72f226dc6cb7b98bea8c35d50cc4e5d06ddf7b654171f65e0bd884fc0161"} Sep 29 10:25:41 crc kubenswrapper[4779]: I0929 10:25:41.050713 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"04247105-501f-4d3a-b624-7e1d64014fe8","Type":"ContainerStarted","Data":"c5e3ed059b1412b7232b9f83d5c58b600e131acd5f5f80aa1a349b2a8c7aa8b6"} Sep 29 10:25:41 crc kubenswrapper[4779]: I0929 10:25:41.618722 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume2-0" Sep 29 10:25:42 crc kubenswrapper[4779]: I0929 10:25:42.068730 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78","Type":"ContainerStarted","Data":"340075d0792ad404d51368e35faa08a902d3d9e998f75fc2d5221770f874eaa9"} Sep 29 10:25:42 crc kubenswrapper[4779]: I0929 10:25:42.076165 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"04247105-501f-4d3a-b624-7e1d64014fe8","Type":"ContainerStarted","Data":"b8703bcff32e404bdf68d769bf796d5d53e51e088473107ecf927e0234b85799"} Sep 29 10:25:42 crc kubenswrapper[4779]: I0929 10:25:42.100140 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.100113804 podStartE2EDuration="4.100113804s" podCreationTimestamp="2025-09-29 10:25:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-29 10:25:42.090048562 +0000 UTC m=+3374.071372486" watchObservedRunningTime="2025-09-29 10:25:42.100113804 +0000 UTC m=+3374.081437708" Sep 29 10:25:46 crc kubenswrapper[4779]: I0929 10:25:46.715786 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:25:46 crc kubenswrapper[4779]: E0929 10:25:46.716869 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:25:48 crc kubenswrapper[4779]: I0929 10:25:48.423747 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 10:25:48 crc kubenswrapper[4779]: I0929 10:25:48.424359 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 10:25:48 crc kubenswrapper[4779]: I0929 10:25:48.468197 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 10:25:48 crc kubenswrapper[4779]: I0929 10:25:48.509253 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 10:25:49 crc kubenswrapper[4779]: I0929 10:25:49.161354 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"04247105-501f-4d3a-b624-7e1d64014fe8","Type":"ContainerStarted","Data":"1cda71cab58560a4708d6b2bed992f90844bc6647a33450407daae129fbdc94f"} Sep 29 10:25:49 crc kubenswrapper[4779]: I0929 10:25:49.162104 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 10:25:49 crc kubenswrapper[4779]: I0929 10:25:49.162135 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 10:25:49 crc kubenswrapper[4779]: I0929 10:25:49.190935 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=10.190911254 podStartE2EDuration="10.190911254s" podCreationTimestamp="2025-09-29 10:25:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 10:25:49.1807918 +0000 UTC m=+3381.162115714" watchObservedRunningTime="2025-09-29 10:25:49.190911254 +0000 UTC m=+3381.172235158" Sep 29 10:25:49 crc kubenswrapper[4779]: I0929 10:25:49.813210 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:49 crc kubenswrapper[4779]: I0929 10:25:49.813766 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:49 crc kubenswrapper[4779]: I0929 10:25:49.850407 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:49 crc kubenswrapper[4779]: I0929 10:25:49.859764 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" 
Sep 29 10:25:50 crc kubenswrapper[4779]: I0929 10:25:50.175957 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d46749f98-lswks" event={"ID":"4e4ac544-73b5-4ec4-975d-83eac168a331","Type":"ContainerStarted","Data":"a57f988c8b308b85aa146e5d34b5ad5b46c13fc8c5526aaa15f2e2f3e66e17a5"} Sep 29 10:25:50 crc kubenswrapper[4779]: I0929 10:25:50.179577 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-675f5c7fcc-8k9c8" event={"ID":"27d399f1-4a87-47f3-8b0c-6a28960f18aa","Type":"ContainerStarted","Data":"dc565f836847f313cd70faf3481411110cc545e71b90759d4ed39b4c30d06c8f"} Sep 29 10:25:50 crc kubenswrapper[4779]: I0929 10:25:50.180495 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:50 crc kubenswrapper[4779]: I0929 10:25:50.180565 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.192935 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8658bb8f6f-ltnkm" event={"ID":"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7","Type":"ContainerStarted","Data":"4e6de76c2aa358d2fc9f513d65e4dbbf23357bc9442f312127940031e69c56ee"} Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.193620 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8658bb8f6f-ltnkm" event={"ID":"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7","Type":"ContainerStarted","Data":"4c6db0bebff6e85ea5d3c7c6156be31271593e39e1309e449ab8b4f1dcd21417"} Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.193323 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8658bb8f6f-ltnkm" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerName="horizon-log" containerID="cri-o://4c6db0bebff6e85ea5d3c7c6156be31271593e39e1309e449ab8b4f1dcd21417" gracePeriod=30 Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.194101 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8658bb8f6f-ltnkm" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerName="horizon" containerID="cri-o://4e6de76c2aa358d2fc9f513d65e4dbbf23357bc9442f312127940031e69c56ee" gracePeriod=30 Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.196583 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54748d8d4-lx86m" event={"ID":"17e72d33-8852-486a-8973-fe22a52f6e00","Type":"ContainerStarted","Data":"07ecb811035411289dfac6db74b1968bc043f172ee875213a6dda7c5548e7eeb"} Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.196627 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54748d8d4-lx86m" event={"ID":"17e72d33-8852-486a-8973-fe22a52f6e00","Type":"ContainerStarted","Data":"5e680707f3a347fd7f4484ad6282cb815da7d813f7713f1ffb62791b70956012"} Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.199128 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d46749f98-lswks" event={"ID":"4e4ac544-73b5-4ec4-975d-83eac168a331","Type":"ContainerStarted","Data":"6283fe2f79b31e97b63835721d0757408739826a511a152d645773dd41fa7cba"} Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.210216 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-675f5c7fcc-8k9c8" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" containerName="horizon-log" 
containerID="cri-o://dc565f836847f313cd70faf3481411110cc545e71b90759d4ed39b4c30d06c8f" gracePeriod=30 Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.210534 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-675f5c7fcc-8k9c8" event={"ID":"27d399f1-4a87-47f3-8b0c-6a28960f18aa","Type":"ContainerStarted","Data":"b2f388e3b42d31c98a040b9fec73f34e5254f450c56538390319c51048da6920"} Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.210611 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-675f5c7fcc-8k9c8" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" containerName="horizon" containerID="cri-o://b2f388e3b42d31c98a040b9fec73f34e5254f450c56538390319c51048da6920" gracePeriod=30 Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.222136 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-8658bb8f6f-ltnkm" podStartSLOduration=3.244812557 podStartE2EDuration="21.222107927s" podCreationTimestamp="2025-09-29 10:25:30 +0000 UTC" firstStartedPulling="2025-09-29 10:25:31.951425382 +0000 UTC m=+3363.932749286" lastFinishedPulling="2025-09-29 10:25:49.928720752 +0000 UTC m=+3381.910044656" observedRunningTime="2025-09-29 10:25:51.217522783 +0000 UTC m=+3383.198846707" watchObservedRunningTime="2025-09-29 10:25:51.222107927 +0000 UTC m=+3383.203431831" Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.260251 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7d46749f98-lswks" podStartSLOduration=4.179807497 podStartE2EDuration="18.260223815s" podCreationTimestamp="2025-09-29 10:25:33 +0000 UTC" firstStartedPulling="2025-09-29 10:25:35.482605798 +0000 UTC m=+3367.463929702" lastFinishedPulling="2025-09-29 10:25:49.563022096 +0000 UTC m=+3381.544346020" observedRunningTime="2025-09-29 10:25:51.254513309 +0000 UTC m=+3383.235837213" watchObservedRunningTime="2025-09-29 10:25:51.260223815 +0000 UTC m=+3383.241547719" Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.294474 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-54748d8d4-lx86m" podStartSLOduration=2.66219356 podStartE2EDuration="18.294448631s" podCreationTimestamp="2025-09-29 10:25:33 +0000 UTC" firstStartedPulling="2025-09-29 10:25:34.799528312 +0000 UTC m=+3366.780852216" lastFinishedPulling="2025-09-29 10:25:50.431783383 +0000 UTC m=+3382.413107287" observedRunningTime="2025-09-29 10:25:51.274762218 +0000 UTC m=+3383.256086132" watchObservedRunningTime="2025-09-29 10:25:51.294448631 +0000 UTC m=+3383.275772535" Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.320149 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-675f5c7fcc-8k9c8" podStartSLOduration=3.064297909 podStartE2EDuration="20.320113467s" podCreationTimestamp="2025-09-29 10:25:31 +0000 UTC" firstStartedPulling="2025-09-29 10:25:32.269472291 +0000 UTC m=+3364.250796195" lastFinishedPulling="2025-09-29 10:25:49.525287849 +0000 UTC m=+3381.506611753" observedRunningTime="2025-09-29 10:25:51.295425719 +0000 UTC m=+3383.276749623" watchObservedRunningTime="2025-09-29 10:25:51.320113467 +0000 UTC m=+3383.301437371" Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.336465 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:25:51 crc kubenswrapper[4779]: I0929 10:25:51.495615 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:25:53 crc kubenswrapper[4779]: I0929 10:25:53.812825 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 10:25:53 crc kubenswrapper[4779]: I0929 10:25:53.813840 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 10:25:53 crc kubenswrapper[4779]: I0929 10:25:53.813867 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 10:25:54 crc kubenswrapper[4779]: I0929 10:25:54.110066 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:54 crc kubenswrapper[4779]: I0929 10:25:54.112016 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:25:54 crc kubenswrapper[4779]: I0929 10:25:54.487651 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:54 crc kubenswrapper[4779]: I0929 10:25:54.488047 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:25:58 crc kubenswrapper[4779]: I0929 10:25:58.720864 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:25:58 crc kubenswrapper[4779]: E0929 10:25:58.721605 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:26:06 crc kubenswrapper[4779]: I0929 10:26:06.041589 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:26:06 crc kubenswrapper[4779]: I0929 10:26:06.428157 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:26:07 crc kubenswrapper[4779]: I0929 10:26:07.831389 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:26:08 crc kubenswrapper[4779]: I0929 10:26:08.315138 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7d46749f98-lswks" Sep 29 10:26:08 crc kubenswrapper[4779]: I0929 10:26:08.382537 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-54748d8d4-lx86m"] Sep 29 10:26:08 crc kubenswrapper[4779]: I0929 10:26:08.422485 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-54748d8d4-lx86m" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon-log" containerID="cri-o://5e680707f3a347fd7f4484ad6282cb815da7d813f7713f1ffb62791b70956012" gracePeriod=30 Sep 29 10:26:08 crc kubenswrapper[4779]: I0929 10:26:08.422927 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-54748d8d4-lx86m" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon" containerID="cri-o://07ecb811035411289dfac6db74b1968bc043f172ee875213a6dda7c5548e7eeb" gracePeriod=30 Sep 
29 10:26:10 crc kubenswrapper[4779]: I0929 10:26:10.448315 4779 generic.go:334] "Generic (PLEG): container finished" podID="17e72d33-8852-486a-8973-fe22a52f6e00" containerID="07ecb811035411289dfac6db74b1968bc043f172ee875213a6dda7c5548e7eeb" exitCode=0 Sep 29 10:26:10 crc kubenswrapper[4779]: I0929 10:26:10.448849 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54748d8d4-lx86m" event={"ID":"17e72d33-8852-486a-8973-fe22a52f6e00","Type":"ContainerDied","Data":"07ecb811035411289dfac6db74b1968bc043f172ee875213a6dda7c5548e7eeb"} Sep 29 10:26:10 crc kubenswrapper[4779]: I0929 10:26:10.725817 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:26:10 crc kubenswrapper[4779]: E0929 10:26:10.728742 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:26:11 crc kubenswrapper[4779]: I0929 10:26:11.820044 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.020578 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l6rcz"] Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.023556 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.039650 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l6rcz"] Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.062316 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxvxc\" (UniqueName: \"kubernetes.io/projected/e31680be-2d9f-47da-998c-c2ee8c71eb1f-kube-api-access-dxvxc\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.062393 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-utilities\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.062734 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-catalog-content\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.164624 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-catalog-content\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " 
pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.165191 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxvxc\" (UniqueName: \"kubernetes.io/projected/e31680be-2d9f-47da-998c-c2ee8c71eb1f-kube-api-access-dxvxc\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.165550 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-utilities\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.165133 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-catalog-content\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.166009 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-utilities\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.192521 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxvxc\" (UniqueName: \"kubernetes.io/projected/e31680be-2d9f-47da-998c-c2ee8c71eb1f-kube-api-access-dxvxc\") pod \"community-operators-l6rcz\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:13 crc kubenswrapper[4779]: I0929 10:26:13.360002 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:14 crc kubenswrapper[4779]: I0929 10:26:14.059166 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l6rcz"] Sep 29 10:26:14 crc kubenswrapper[4779]: I0929 10:26:14.108716 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-54748d8d4-lx86m" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.8:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.8:8443: connect: connection refused" Sep 29 10:26:14 crc kubenswrapper[4779]: I0929 10:26:14.509637 4779 generic.go:334] "Generic (PLEG): container finished" podID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerID="dc91e1fb322963176eee25b6bd990d318149a0d9db7b2bb750906d037dae2d81" exitCode=0 Sep 29 10:26:14 crc kubenswrapper[4779]: I0929 10:26:14.509698 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6rcz" event={"ID":"e31680be-2d9f-47da-998c-c2ee8c71eb1f","Type":"ContainerDied","Data":"dc91e1fb322963176eee25b6bd990d318149a0d9db7b2bb750906d037dae2d81"} Sep 29 10:26:14 crc kubenswrapper[4779]: I0929 10:26:14.509735 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6rcz" event={"ID":"e31680be-2d9f-47da-998c-c2ee8c71eb1f","Type":"ContainerStarted","Data":"704bc7d22dd10fbcd8413e0a30933ce1c2aded8278aae361da4a05487d72c578"} Sep 29 10:26:16 crc kubenswrapper[4779]: I0929 10:26:16.534998 4779 generic.go:334] "Generic (PLEG): container finished" podID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerID="b966ac2916afda7757e87ae2c3995f3765c4f35b6d76fd55b83ebcd6af5b33ac" exitCode=0 Sep 29 10:26:16 crc kubenswrapper[4779]: I0929 10:26:16.535060 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6rcz" event={"ID":"e31680be-2d9f-47da-998c-c2ee8c71eb1f","Type":"ContainerDied","Data":"b966ac2916afda7757e87ae2c3995f3765c4f35b6d76fd55b83ebcd6af5b33ac"} Sep 29 10:26:18 crc kubenswrapper[4779]: I0929 10:26:18.558845 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6rcz" event={"ID":"e31680be-2d9f-47da-998c-c2ee8c71eb1f","Type":"ContainerStarted","Data":"20ad142282b3f2953305bcc96b6cd7fffe14a97cdd4de499abfb1fca9aded3d3"} Sep 29 10:26:18 crc kubenswrapper[4779]: I0929 10:26:18.586314 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l6rcz" podStartSLOduration=3.7007465760000002 podStartE2EDuration="6.586286755s" podCreationTimestamp="2025-09-29 10:26:12 +0000 UTC" firstStartedPulling="2025-09-29 10:26:14.514118265 +0000 UTC m=+3406.495442169" lastFinishedPulling="2025-09-29 10:26:17.399658444 +0000 UTC m=+3409.380982348" observedRunningTime="2025-09-29 10:26:18.583485824 +0000 UTC m=+3410.564809738" watchObservedRunningTime="2025-09-29 10:26:18.586286755 +0000 UTC m=+3410.567610659" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.595053 4779 generic.go:334] "Generic (PLEG): container finished" podID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" containerID="b2f388e3b42d31c98a040b9fec73f34e5254f450c56538390319c51048da6920" exitCode=137 Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.595976 4779 generic.go:334] "Generic (PLEG): container finished" podID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" 
containerID="dc565f836847f313cd70faf3481411110cc545e71b90759d4ed39b4c30d06c8f" exitCode=137 Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.596061 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-675f5c7fcc-8k9c8" event={"ID":"27d399f1-4a87-47f3-8b0c-6a28960f18aa","Type":"ContainerDied","Data":"b2f388e3b42d31c98a040b9fec73f34e5254f450c56538390319c51048da6920"} Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.596097 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-675f5c7fcc-8k9c8" event={"ID":"27d399f1-4a87-47f3-8b0c-6a28960f18aa","Type":"ContainerDied","Data":"dc565f836847f313cd70faf3481411110cc545e71b90759d4ed39b4c30d06c8f"} Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.596108 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-675f5c7fcc-8k9c8" event={"ID":"27d399f1-4a87-47f3-8b0c-6a28960f18aa","Type":"ContainerDied","Data":"8cfc15092a133a3017db09e6b305d86de2463a4a92a0a9e9b868fd91f1f17011"} Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.596118 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cfc15092a133a3017db09e6b305d86de2463a4a92a0a9e9b868fd91f1f17011" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.608683 4779 generic.go:334] "Generic (PLEG): container finished" podID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerID="4e6de76c2aa358d2fc9f513d65e4dbbf23357bc9442f312127940031e69c56ee" exitCode=137 Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.608731 4779 generic.go:334] "Generic (PLEG): container finished" podID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerID="4c6db0bebff6e85ea5d3c7c6156be31271593e39e1309e449ab8b4f1dcd21417" exitCode=137 Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.608758 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8658bb8f6f-ltnkm" event={"ID":"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7","Type":"ContainerDied","Data":"4e6de76c2aa358d2fc9f513d65e4dbbf23357bc9442f312127940031e69c56ee"} Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.608814 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8658bb8f6f-ltnkm" event={"ID":"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7","Type":"ContainerDied","Data":"4c6db0bebff6e85ea5d3c7c6156be31271593e39e1309e449ab8b4f1dcd21417"} Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.689847 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.703761 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813194 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-config-data\") pod \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813286 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-logs\") pod \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813396 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-config-data\") pod \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813491 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-scripts\") pod \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813758 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-horizon-secret-key\") pod \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813870 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkzrb\" (UniqueName: \"kubernetes.io/projected/27d399f1-4a87-47f3-8b0c-6a28960f18aa-kube-api-access-nkzrb\") pod \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813891 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27d399f1-4a87-47f3-8b0c-6a28960f18aa-horizon-secret-key\") pod \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813969 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp947\" (UniqueName: \"kubernetes.io/projected/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-kube-api-access-wp947\") pod \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.813993 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-scripts\") pod \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\" (UID: \"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.814126 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-logs" (OuterVolumeSpecName: "logs") pod "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" (UID: 
"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.814234 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d399f1-4a87-47f3-8b0c-6a28960f18aa-logs\") pod \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\" (UID: \"27d399f1-4a87-47f3-8b0c-6a28960f18aa\") " Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.814869 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27d399f1-4a87-47f3-8b0c-6a28960f18aa-logs" (OuterVolumeSpecName: "logs") pod "27d399f1-4a87-47f3-8b0c-6a28960f18aa" (UID: "27d399f1-4a87-47f3-8b0c-6a28960f18aa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.815113 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d399f1-4a87-47f3-8b0c-6a28960f18aa-logs\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.815129 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.820410 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-kube-api-access-wp947" (OuterVolumeSpecName: "kube-api-access-wp947") pod "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" (UID: "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7"). InnerVolumeSpecName "kube-api-access-wp947". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.820981 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27d399f1-4a87-47f3-8b0c-6a28960f18aa-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "27d399f1-4a87-47f3-8b0c-6a28960f18aa" (UID: "27d399f1-4a87-47f3-8b0c-6a28960f18aa"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.821008 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" (UID: "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.821069 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27d399f1-4a87-47f3-8b0c-6a28960f18aa-kube-api-access-nkzrb" (OuterVolumeSpecName: "kube-api-access-nkzrb") pod "27d399f1-4a87-47f3-8b0c-6a28960f18aa" (UID: "27d399f1-4a87-47f3-8b0c-6a28960f18aa"). InnerVolumeSpecName "kube-api-access-nkzrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.841960 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-scripts" (OuterVolumeSpecName: "scripts") pod "27d399f1-4a87-47f3-8b0c-6a28960f18aa" (UID: "27d399f1-4a87-47f3-8b0c-6a28960f18aa"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.842058 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-config-data" (OuterVolumeSpecName: "config-data") pod "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" (UID: "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.842891 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-config-data" (OuterVolumeSpecName: "config-data") pod "27d399f1-4a87-47f3-8b0c-6a28960f18aa" (UID: "27d399f1-4a87-47f3-8b0c-6a28960f18aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.845979 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-scripts" (OuterVolumeSpecName: "scripts") pod "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" (UID: "38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.916753 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp947\" (UniqueName: \"kubernetes.io/projected/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-kube-api-access-wp947\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.916814 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.916828 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.916844 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.916854 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27d399f1-4a87-47f3-8b0c-6a28960f18aa-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.916868 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.916882 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/27d399f1-4a87-47f3-8b0c-6a28960f18aa-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:21 crc kubenswrapper[4779]: I0929 10:26:21.916893 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkzrb\" (UniqueName: \"kubernetes.io/projected/27d399f1-4a87-47f3-8b0c-6a28960f18aa-kube-api-access-nkzrb\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.625471 4779 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8658bb8f6f-ltnkm" event={"ID":"38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7","Type":"ContainerDied","Data":"c2963ceef4cf641bd72fcc634360728b3c5e2ca0f357cb26e376aa89e08c9c41"} Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.625562 4779 scope.go:117] "RemoveContainer" containerID="4e6de76c2aa358d2fc9f513d65e4dbbf23357bc9442f312127940031e69c56ee" Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.625585 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8658bb8f6f-ltnkm" Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.626624 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-675f5c7fcc-8k9c8" Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.681539 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-675f5c7fcc-8k9c8"] Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.701037 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-675f5c7fcc-8k9c8"] Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.736738 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" path="/var/lib/kubelet/pods/27d399f1-4a87-47f3-8b0c-6a28960f18aa/volumes" Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.737722 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8658bb8f6f-ltnkm"] Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.737765 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-8658bb8f6f-ltnkm"] Sep 29 10:26:22 crc kubenswrapper[4779]: I0929 10:26:22.805278 4779 scope.go:117] "RemoveContainer" containerID="4c6db0bebff6e85ea5d3c7c6156be31271593e39e1309e449ab8b4f1dcd21417" Sep 29 10:26:23 crc kubenswrapper[4779]: I0929 10:26:23.360200 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:23 crc kubenswrapper[4779]: I0929 10:26:23.360272 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:23 crc kubenswrapper[4779]: I0929 10:26:23.419089 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:23 crc kubenswrapper[4779]: I0929 10:26:23.693549 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:23 crc kubenswrapper[4779]: I0929 10:26:23.760790 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l6rcz"] Sep 29 10:26:24 crc kubenswrapper[4779]: I0929 10:26:24.108658 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-54748d8d4-lx86m" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.8:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.8:8443: connect: connection refused" Sep 29 10:26:24 crc kubenswrapper[4779]: I0929 10:26:24.715029 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:26:24 crc kubenswrapper[4779]: E0929 10:26:24.715400 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:26:24 crc kubenswrapper[4779]: I0929 10:26:24.730249 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" path="/var/lib/kubelet/pods/38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7/volumes" Sep 29 10:26:25 crc kubenswrapper[4779]: I0929 10:26:25.659556 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l6rcz" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerName="registry-server" containerID="cri-o://20ad142282b3f2953305bcc96b6cd7fffe14a97cdd4de499abfb1fca9aded3d3" gracePeriod=2 Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.674602 4779 generic.go:334] "Generic (PLEG): container finished" podID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerID="20ad142282b3f2953305bcc96b6cd7fffe14a97cdd4de499abfb1fca9aded3d3" exitCode=0 Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.674943 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6rcz" event={"ID":"e31680be-2d9f-47da-998c-c2ee8c71eb1f","Type":"ContainerDied","Data":"20ad142282b3f2953305bcc96b6cd7fffe14a97cdd4de499abfb1fca9aded3d3"} Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.675414 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l6rcz" event={"ID":"e31680be-2d9f-47da-998c-c2ee8c71eb1f","Type":"ContainerDied","Data":"704bc7d22dd10fbcd8413e0a30933ce1c2aded8278aae361da4a05487d72c578"} Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.675445 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="704bc7d22dd10fbcd8413e0a30933ce1c2aded8278aae361da4a05487d72c578" Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.735714 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.864887 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxvxc\" (UniqueName: \"kubernetes.io/projected/e31680be-2d9f-47da-998c-c2ee8c71eb1f-kube-api-access-dxvxc\") pod \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.865049 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-catalog-content\") pod \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.865401 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-utilities\") pod \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\" (UID: \"e31680be-2d9f-47da-998c-c2ee8c71eb1f\") " Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.866927 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-utilities" (OuterVolumeSpecName: "utilities") pod "e31680be-2d9f-47da-998c-c2ee8c71eb1f" (UID: "e31680be-2d9f-47da-998c-c2ee8c71eb1f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.876528 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e31680be-2d9f-47da-998c-c2ee8c71eb1f-kube-api-access-dxvxc" (OuterVolumeSpecName: "kube-api-access-dxvxc") pod "e31680be-2d9f-47da-998c-c2ee8c71eb1f" (UID: "e31680be-2d9f-47da-998c-c2ee8c71eb1f"). InnerVolumeSpecName "kube-api-access-dxvxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.935213 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e31680be-2d9f-47da-998c-c2ee8c71eb1f" (UID: "e31680be-2d9f-47da-998c-c2ee8c71eb1f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.969768 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.970216 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxvxc\" (UniqueName: \"kubernetes.io/projected/e31680be-2d9f-47da-998c-c2ee8c71eb1f-kube-api-access-dxvxc\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:26 crc kubenswrapper[4779]: I0929 10:26:26.970229 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31680be-2d9f-47da-998c-c2ee8c71eb1f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:27 crc kubenswrapper[4779]: I0929 10:26:27.683630 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l6rcz" Sep 29 10:26:27 crc kubenswrapper[4779]: I0929 10:26:27.727470 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l6rcz"] Sep 29 10:26:27 crc kubenswrapper[4779]: I0929 10:26:27.740863 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l6rcz"] Sep 29 10:26:28 crc kubenswrapper[4779]: I0929 10:26:28.736320 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" path="/var/lib/kubelet/pods/e31680be-2d9f-47da-998c-c2ee8c71eb1f/volumes" Sep 29 10:26:34 crc kubenswrapper[4779]: I0929 10:26:34.108998 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-54748d8d4-lx86m" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.8:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.8:8443: connect: connection refused" Sep 29 10:26:34 crc kubenswrapper[4779]: I0929 10:26:34.110061 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-54748d8d4-lx86m" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.525984 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bhs8c"] Sep 29 10:26:35 crc kubenswrapper[4779]: E0929 10:26:35.527059 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerName="horizon-log" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527077 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerName="horizon-log" Sep 29 10:26:35 crc kubenswrapper[4779]: E0929 10:26:35.527098 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerName="extract-utilities" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527107 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerName="extract-utilities" Sep 29 10:26:35 crc kubenswrapper[4779]: E0929 10:26:35.527127 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerName="horizon" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527135 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerName="horizon" Sep 29 10:26:35 crc kubenswrapper[4779]: E0929 10:26:35.527161 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerName="registry-server" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527169 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerName="registry-server" Sep 29 10:26:35 crc kubenswrapper[4779]: E0929 10:26:35.527191 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" containerName="horizon-log" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527200 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" containerName="horizon-log" Sep 29 10:26:35 crc kubenswrapper[4779]: E0929 10:26:35.527215 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" 
containerName="horizon" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527223 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" containerName="horizon" Sep 29 10:26:35 crc kubenswrapper[4779]: E0929 10:26:35.527237 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerName="extract-content" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527244 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerName="extract-content" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527499 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" containerName="horizon-log" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527520 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerName="horizon" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527574 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e31680be-2d9f-47da-998c-c2ee8c71eb1f" containerName="registry-server" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527595 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="38d361d5-23c4-4a86-aea1-4cbc5b8ab3b7" containerName="horizon-log" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.527610 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d399f1-4a87-47f3-8b0c-6a28960f18aa" containerName="horizon" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.529622 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.537281 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bhs8c"] Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.579780 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-utilities\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.579866 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-catalog-content\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.580269 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkj4d\" (UniqueName: \"kubernetes.io/projected/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-kube-api-access-gkj4d\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.682494 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-utilities\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " 
pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.682591 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-catalog-content\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.682746 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkj4d\" (UniqueName: \"kubernetes.io/projected/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-kube-api-access-gkj4d\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.683088 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-utilities\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.683331 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-catalog-content\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.707182 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkj4d\" (UniqueName: \"kubernetes.io/projected/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-kube-api-access-gkj4d\") pod \"certified-operators-bhs8c\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") " pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:35 crc kubenswrapper[4779]: I0929 10:26:35.864614 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bhs8c" Sep 29 10:26:36 crc kubenswrapper[4779]: I0929 10:26:36.450638 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bhs8c"] Sep 29 10:26:36 crc kubenswrapper[4779]: I0929 10:26:36.714424 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:26:36 crc kubenswrapper[4779]: E0929 10:26:36.715054 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:26:36 crc kubenswrapper[4779]: I0929 10:26:36.787781 4779 generic.go:334] "Generic (PLEG): container finished" podID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerID="5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04" exitCode=0 Sep 29 10:26:36 crc kubenswrapper[4779]: I0929 10:26:36.787865 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhs8c" event={"ID":"591ac1c1-65cb-4471-a9ee-8e4d17e9d395","Type":"ContainerDied","Data":"5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04"} Sep 29 10:26:36 crc kubenswrapper[4779]: I0929 10:26:36.787959 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhs8c" event={"ID":"591ac1c1-65cb-4471-a9ee-8e4d17e9d395","Type":"ContainerStarted","Data":"eb32ba34b46c5be18cd5ff7132a29a1f729d8e7dd2c939f2851d99059cc0e69a"} Sep 29 10:26:38 crc kubenswrapper[4779]: I0929 10:26:38.813021 4779 generic.go:334] "Generic (PLEG): container finished" podID="17e72d33-8852-486a-8973-fe22a52f6e00" containerID="5e680707f3a347fd7f4484ad6282cb815da7d813f7713f1ffb62791b70956012" exitCode=137 Sep 29 10:26:38 crc kubenswrapper[4779]: I0929 10:26:38.813303 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54748d8d4-lx86m" event={"ID":"17e72d33-8852-486a-8973-fe22a52f6e00","Type":"ContainerDied","Data":"5e680707f3a347fd7f4484ad6282cb815da7d813f7713f1ffb62791b70956012"} Sep 29 10:26:38 crc kubenswrapper[4779]: I0929 10:26:38.831732 4779 generic.go:334] "Generic (PLEG): container finished" podID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerID="787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7" exitCode=0 Sep 29 10:26:38 crc kubenswrapper[4779]: I0929 10:26:38.831779 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhs8c" event={"ID":"591ac1c1-65cb-4471-a9ee-8e4d17e9d395","Type":"ContainerDied","Data":"787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7"} Sep 29 10:26:38 crc kubenswrapper[4779]: I0929 10:26:38.967296 4779 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 10:26:38 crc kubenswrapper[4779]: I0929 10:26:38.967296 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-54748d8d4-lx86m"
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.062918 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-combined-ca-bundle\") pod \"17e72d33-8852-486a-8973-fe22a52f6e00\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") "
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.063019 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17e72d33-8852-486a-8973-fe22a52f6e00-logs\") pod \"17e72d33-8852-486a-8973-fe22a52f6e00\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") "
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.063132 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q765s\" (UniqueName: \"kubernetes.io/projected/17e72d33-8852-486a-8973-fe22a52f6e00-kube-api-access-q765s\") pod \"17e72d33-8852-486a-8973-fe22a52f6e00\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") "
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.063214 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-secret-key\") pod \"17e72d33-8852-486a-8973-fe22a52f6e00\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") "
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.063325 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-tls-certs\") pod \"17e72d33-8852-486a-8973-fe22a52f6e00\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") "
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.063434 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-scripts\") pod \"17e72d33-8852-486a-8973-fe22a52f6e00\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") "
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.063519 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-config-data\") pod \"17e72d33-8852-486a-8973-fe22a52f6e00\" (UID: \"17e72d33-8852-486a-8973-fe22a52f6e00\") "
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.064651 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17e72d33-8852-486a-8973-fe22a52f6e00-logs" (OuterVolumeSpecName: "logs") pod "17e72d33-8852-486a-8973-fe22a52f6e00" (UID: "17e72d33-8852-486a-8973-fe22a52f6e00"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.070592 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17e72d33-8852-486a-8973-fe22a52f6e00-kube-api-access-q765s" (OuterVolumeSpecName: "kube-api-access-q765s") pod "17e72d33-8852-486a-8973-fe22a52f6e00" (UID: "17e72d33-8852-486a-8973-fe22a52f6e00"). InnerVolumeSpecName "kube-api-access-q765s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.073290 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "17e72d33-8852-486a-8973-fe22a52f6e00" (UID: "17e72d33-8852-486a-8973-fe22a52f6e00"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.094973 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17e72d33-8852-486a-8973-fe22a52f6e00" (UID: "17e72d33-8852-486a-8973-fe22a52f6e00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.100833 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-config-data" (OuterVolumeSpecName: "config-data") pod "17e72d33-8852-486a-8973-fe22a52f6e00" (UID: "17e72d33-8852-486a-8973-fe22a52f6e00"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.116951 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-scripts" (OuterVolumeSpecName: "scripts") pod "17e72d33-8852-486a-8973-fe22a52f6e00" (UID: "17e72d33-8852-486a-8973-fe22a52f6e00"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.124971 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "17e72d33-8852-486a-8973-fe22a52f6e00" (UID: "17e72d33-8852-486a-8973-fe22a52f6e00"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.165973 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q765s\" (UniqueName: \"kubernetes.io/projected/17e72d33-8852-486a-8973-fe22a52f6e00-kube-api-access-q765s\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.166019 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.166030 4779 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-horizon-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.166041 4779 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.166050 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/17e72d33-8852-486a-8973-fe22a52f6e00-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.166061 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17e72d33-8852-486a-8973-fe22a52f6e00-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.166069 4779 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17e72d33-8852-486a-8973-fe22a52f6e00-logs\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.850282 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhs8c" event={"ID":"591ac1c1-65cb-4471-a9ee-8e4d17e9d395","Type":"ContainerStarted","Data":"473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf"}
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.854619 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54748d8d4-lx86m" event={"ID":"17e72d33-8852-486a-8973-fe22a52f6e00","Type":"ContainerDied","Data":"f55d14e01eab69dba2c5e4bddaf46172efad3a116a26c0368733c9bbbcff6cbd"}
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.854679 4779 scope.go:117] "RemoveContainer" containerID="07ecb811035411289dfac6db74b1968bc043f172ee875213a6dda7c5548e7eeb"
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.854706 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-54748d8d4-lx86m"
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.877086 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bhs8c" podStartSLOduration=2.405512571 podStartE2EDuration="4.877061994s" podCreationTimestamp="2025-09-29 10:26:35 +0000 UTC" firstStartedPulling="2025-09-29 10:26:36.790624122 +0000 UTC m=+3428.771948026" lastFinishedPulling="2025-09-29 10:26:39.262173545 +0000 UTC m=+3431.243497449" observedRunningTime="2025-09-29 10:26:39.874641785 +0000 UTC m=+3431.855965709" watchObservedRunningTime="2025-09-29 10:26:39.877061994 +0000 UTC m=+3431.858385898"
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.900594 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-54748d8d4-lx86m"]
Sep 29 10:26:39 crc kubenswrapper[4779]: I0929 10:26:39.908868 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-54748d8d4-lx86m"]
Sep 29 10:26:40 crc kubenswrapper[4779]: I0929 10:26:40.028568 4779 scope.go:117] "RemoveContainer" containerID="5e680707f3a347fd7f4484ad6282cb815da7d813f7713f1ffb62791b70956012"
Sep 29 10:26:40 crc kubenswrapper[4779]: I0929 10:26:40.726772 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" path="/var/lib/kubelet/pods/17e72d33-8852-486a-8973-fe22a52f6e00/volumes"
Sep 29 10:26:45 crc kubenswrapper[4779]: I0929 10:26:45.865187 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bhs8c"
Sep 29 10:26:45 crc kubenswrapper[4779]: I0929 10:26:45.865755 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bhs8c"
Sep 29 10:26:45 crc kubenswrapper[4779]: I0929 10:26:45.942786 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bhs8c"
Sep 29 10:26:45 crc kubenswrapper[4779]: I0929 10:26:45.998590 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bhs8c"
Sep 29 10:26:46 crc kubenswrapper[4779]: I0929 10:26:46.200305 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bhs8c"]
Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.169372 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.170249 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="prometheus" containerID="cri-o://4691ae1aafdb1775cb7f738fa4e5919e9ceaef81d7802b5bb89fcb24219d95ed" gracePeriod=600
Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.170718 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="config-reloader" containerID="cri-o://d22c0e2ba105ddfc6f8935ba0c2d9739c12bb82a4acecb9a5fa0c89a9720d2c7" gracePeriod=600
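The "Killing container with a grace period" records above and just below all carry gracePeriod=600: that is the pod-level terminationGracePeriodSeconds handed to the runtime, which sends SIGTERM first and SIGKILL once the deadline lapses. A minimal sketch of where that value lives, assuming the k8s.io/api Go types (the container names mirror the log; everything else is illustrative):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Illustrative only: a pod spec whose terminationGracePeriodSeconds
	// would surface as gracePeriod=600 in kubelet's kill records.
	grace := int64(600)
	spec := corev1.PodSpec{
		TerminationGracePeriodSeconds: &grace,
		Containers: []corev1.Container{
			{Name: "prometheus"},
			{Name: "config-reloader"},
			{Name: "thanos-sidecar"},
		},
	}
	fmt.Printf("grace period: %ds for %d containers\n",
		*spec.TerminationGracePeriodSeconds, len(spec.Containers))
}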
containerID="cri-o://8e37cf67ecbe12050d8a7f3b49c88b07c480b057907ea7f37d1d6203cf26cb8a" gracePeriod=600 Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.550347 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="prometheus" probeResult="failure" output="Get \"https://10.217.0.129:9090/-/ready\": dial tcp 10.217.0.129:9090: connect: connection refused" Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.940074 4779 generic.go:334] "Generic (PLEG): container finished" podID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerID="8e37cf67ecbe12050d8a7f3b49c88b07c480b057907ea7f37d1d6203cf26cb8a" exitCode=0 Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.940112 4779 generic.go:334] "Generic (PLEG): container finished" podID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerID="d22c0e2ba105ddfc6f8935ba0c2d9739c12bb82a4acecb9a5fa0c89a9720d2c7" exitCode=0 Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.940123 4779 generic.go:334] "Generic (PLEG): container finished" podID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerID="4691ae1aafdb1775cb7f738fa4e5919e9ceaef81d7802b5bb89fcb24219d95ed" exitCode=0 Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.940175 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerDied","Data":"8e37cf67ecbe12050d8a7f3b49c88b07c480b057907ea7f37d1d6203cf26cb8a"} Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.940219 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerDied","Data":"d22c0e2ba105ddfc6f8935ba0c2d9739c12bb82a4acecb9a5fa0c89a9720d2c7"} Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.940230 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerDied","Data":"4691ae1aafdb1775cb7f738fa4e5919e9ceaef81d7802b5bb89fcb24219d95ed"} Sep 29 10:26:47 crc kubenswrapper[4779]: I0929 10:26:47.940370 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bhs8c" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerName="registry-server" containerID="cri-o://473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf" gracePeriod=2 Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.171554 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.286616 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-thanos-prometheus-http-client-file\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.286675 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.286698 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwc8r\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-kube-api-access-mwc8r\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.286881 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1c5a9de7-c943-4654-bb1a-087fd1bb739e-prometheus-metric-storage-rulefiles-0\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.286936 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-tls-assets\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.286957 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-secret-combined-ca-bundle\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.287054 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.287211 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.287273 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: 
\"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.287304 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config-out\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.287326 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config\") pod \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\" (UID: \"1c5a9de7-c943-4654-bb1a-087fd1bb739e\") " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.294253 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c5a9de7-c943-4654-bb1a-087fd1bb739e-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.294795 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.300642 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.301591 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.313265 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.313279 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config" (OuterVolumeSpecName: "config") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.313323 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-kube-api-access-mwc8r" (OuterVolumeSpecName: "kube-api-access-mwc8r") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "kube-api-access-mwc8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.316077 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config-out" (OuterVolumeSpecName: "config-out") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.322040 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.386801 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389720 4779 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389768 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") on node \"crc\" " Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389779 4779 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389789 4779 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/1c5a9de7-c943-4654-bb1a-087fd1bb739e-config-out\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389798 4779 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389811 4779 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389821 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwc8r\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-kube-api-access-mwc8r\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389830 4779 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/1c5a9de7-c943-4654-bb1a-087fd1bb739e-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389838 4779 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/1c5a9de7-c943-4654-bb1a-087fd1bb739e-tls-assets\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.389847 4779 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.403743 4779 scope.go:117] "RemoveContainer" containerID="125698833ede566b41210de4b766e59874ee11f0b7ab1025b0525519921f098f" Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.435243 4779 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.435698 4779 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8") on node "crc"
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.441859 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config" (OuterVolumeSpecName: "web-config") pod "1c5a9de7-c943-4654-bb1a-087fd1bb739e" (UID: "1c5a9de7-c943-4654-bb1a-087fd1bb739e"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.491988 4779 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/1c5a9de7-c943-4654-bb1a-087fd1bb739e-web-config\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.492026 4779 reconciler_common.go:293] "Volume detached for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.529849 4779 scope.go:117] "RemoveContainer" containerID="8e37cf67ecbe12050d8a7f3b49c88b07c480b057907ea7f37d1d6203cf26cb8a"
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.560508 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bhs8c"
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.612687 4779 scope.go:117] "RemoveContainer" containerID="d22c0e2ba105ddfc6f8935ba0c2d9739c12bb82a4acecb9a5fa0c89a9720d2c7"
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.660806 4779 scope.go:117] "RemoveContainer" containerID="4691ae1aafdb1775cb7f738fa4e5919e9ceaef81d7802b5bb89fcb24219d95ed"
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.694340 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-utilities\") pod \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") "
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.694782 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-catalog-content\") pod \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") "
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.695013 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkj4d\" (UniqueName: \"kubernetes.io/projected/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-kube-api-access-gkj4d\") pod \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\" (UID: \"591ac1c1-65cb-4471-a9ee-8e4d17e9d395\") "
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.695490 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-utilities" (OuterVolumeSpecName: "utilities") pod "591ac1c1-65cb-4471-a9ee-8e4d17e9d395" (UID: "591ac1c1-65cb-4471-a9ee-8e4d17e9d395"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.695802 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.699094 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-kube-api-access-gkj4d" (OuterVolumeSpecName: "kube-api-access-gkj4d") pod "591ac1c1-65cb-4471-a9ee-8e4d17e9d395" (UID: "591ac1c1-65cb-4471-a9ee-8e4d17e9d395"). InnerVolumeSpecName "kube-api-access-gkj4d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.741855 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "591ac1c1-65cb-4471-a9ee-8e4d17e9d395" (UID: "591ac1c1-65cb-4471-a9ee-8e4d17e9d395"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.797620 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.797667 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkj4d\" (UniqueName: \"kubernetes.io/projected/591ac1c1-65cb-4471-a9ee-8e4d17e9d395-kube-api-access-gkj4d\") on node \"crc\" DevicePath \"\""
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.962837 4779 generic.go:334] "Generic (PLEG): container finished" podID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerID="473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf" exitCode=0
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.962957 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhs8c" event={"ID":"591ac1c1-65cb-4471-a9ee-8e4d17e9d395","Type":"ContainerDied","Data":"473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf"}
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.963013 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.963032 4779 scope.go:117] "RemoveContainer" containerID="473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf"
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.963018 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhs8c" event={"ID":"591ac1c1-65cb-4471-a9ee-8e4d17e9d395","Type":"ContainerDied","Data":"eb32ba34b46c5be18cd5ff7132a29a1f729d8e7dd2c939f2851d99059cc0e69a"}
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.962978 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bhs8c"
Sep 29 10:26:48 crc kubenswrapper[4779]: I0929 10:26:48.963736 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"1c5a9de7-c943-4654-bb1a-087fd1bb739e","Type":"ContainerDied","Data":"e37e8ea7604fb2c6fba0963aad6d9aa60627e04edf4d3b9d49bb6006bb3a1e26"}
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.007070 4779 scope.go:117] "RemoveContainer" containerID="787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.034772 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bhs8c"]
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.042884 4779 scope.go:117] "RemoveContainer" containerID="5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.047252 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bhs8c"]
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.077007 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.079672 4779 scope.go:117] "RemoveContainer" containerID="473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.080203 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf\": container with ID starting with 473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf not found: ID does not exist" containerID="473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.080245 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf"} err="failed to get container status \"473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf\": rpc error: code = NotFound desc = could not find container \"473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf\": container with ID starting with 473904c3763e3f9aa1c1181670a88cdac8b9b737d6ffe3b2d452038f3f6fc2cf not found: ID does not exist"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.080266 4779 scope.go:117] "RemoveContainer" containerID="787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.081049 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7\": container with ID starting with 787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7 not found: ID does not exist" containerID="787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.081072 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7"} err="failed to get container status \"787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7\": rpc error: code = NotFound desc = could not find container \"787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7\": container with ID starting with 787e506e7e0dcdc697088a91e063f6935fadf4d4798d25bd9f1baa48a8cea8e7 not found: ID does not exist"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.081086 4779 scope.go:117] "RemoveContainer" containerID="5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.083785 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04\": container with ID starting with 5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04 not found: ID does not exist" containerID="5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.083816 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04"} err="failed to get container status \"5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04\": rpc error: code = NotFound desc = could not find container \"5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04\": container with ID starting with 5a6cf99dbcf230d3bfccaea85ae6ee2596d46cdafc7731ac890aa00dc62b4d04 not found: ID does not exist"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.093150 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.105962 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106572 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="config-reloader"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106598 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="config-reloader"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106617 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerName="extract-content"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106625 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerName="extract-content"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106644 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon-log"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106652 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon-log"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106673 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106680 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106696 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="prometheus"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106703 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="prometheus"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106718 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerName="extract-utilities"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106726 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerName="extract-utilities"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106741 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerName="registry-server"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106749 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerName="registry-server"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106762 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="thanos-sidecar"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106770 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="thanos-sidecar"
Sep 29 10:26:49 crc kubenswrapper[4779]: E0929 10:26:49.106778 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="init-config-reloader"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.106786 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="init-config-reloader"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.107090 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.107114 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" containerName="registry-server"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.107129 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="prometheus"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.107138 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="config-reloader"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.107146 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="17e72d33-8852-486a-8973-fe22a52f6e00" containerName="horizon-log"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.107161 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" containerName="thanos-sidecar"
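The "Caches populated for *v1.Secret ..." records just below come from client-go reflectors doing an initial list+watch for the new pod's secrets and config maps. A minimal sketch of that same cache-sync mechanism, assuming a reachable kubeconfig at the default location (kubelet wires its reflectors differently, but the populated-cache signal is the same idea):

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Illustrative only: build a client from the default kubeconfig path.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	factory := informers.NewSharedInformerFactoryWithOptions(
		cs, 30*time.Second, informers.WithNamespace("openstack"))
	// Registering the informer makes the factory start a reflector for
	// Secrets when Start is called.
	factory.Core().V1().Secrets().Informer()
	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)
	// WaitForCacheSync blocks until the initial list is in the local
	// cache, the moment the kubelet records as "Caches populated".
	for typ, ok := range factory.WaitForCacheSync(stop) {
		fmt.Printf("cache populated: %v synced=%v\n", typ, ok)
	}
}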
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.113523 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.114651 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.114940 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.114945 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.115146 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-zgl4h" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.116734 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.136277 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206051 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq9lc\" (UniqueName: \"kubernetes.io/projected/65057b07-2357-4d32-bd20-dee421ca5041-kube-api-access-cq9lc\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206157 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206192 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/65057b07-2357-4d32-bd20-dee421ca5041-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206210 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206240 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: 
\"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206356 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-config\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206386 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206456 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/65057b07-2357-4d32-bd20-dee421ca5041-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206485 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.206532 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/65057b07-2357-4d32-bd20-dee421ca5041-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.207540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.309881 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/65057b07-2357-4d32-bd20-dee421ca5041-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.309963 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0" Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.310029 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq9lc\" (UniqueName: 
\"kubernetes.io/projected/65057b07-2357-4d32-bd20-dee421ca5041-kube-api-access-cq9lc\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.310066 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.310090 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/65057b07-2357-4d32-bd20-dee421ca5041-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.310109 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.310222 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.310747 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-config\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.310806 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.311018 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/65057b07-2357-4d32-bd20-dee421ca5041-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.311287 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.312125 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/65057b07-2357-4d32-bd20-dee421ca5041-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.314180 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-config\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.315450 4779 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.315481 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d7c28a7a22cfe960b99bbb5b934acd1f650db36f185879457a9343b648a1e5b0/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.315766 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.315855 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/65057b07-2357-4d32-bd20-dee421ca5041-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.316117 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/65057b07-2357-4d32-bd20-dee421ca5041-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.316958 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.317418 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.317937 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.324249 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/65057b07-2357-4d32-bd20-dee421ca5041-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.327096 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq9lc\" (UniqueName: \"kubernetes.io/projected/65057b07-2357-4d32-bd20-dee421ca5041-kube-api-access-cq9lc\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.364223 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-839bd8cf-b5d6-43ab-b010-7b66d2eb87b8\") pod \"prometheus-metric-storage-0\" (UID: \"65057b07-2357-4d32-bd20-dee421ca5041\") " pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.447680 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.714252 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338"
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.945796 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 29 10:26:49 crc kubenswrapper[4779]: W0929 10:26:49.949941 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65057b07_2357_4d32_bd20_dee421ca5041.slice/crio-a35f5ff3e23ed27ca0942fad8196f0749c1b266f2bdb094b1491ccb4ea5b2246 WatchSource:0}: Error finding container a35f5ff3e23ed27ca0942fad8196f0749c1b266f2bdb094b1491ccb4ea5b2246: Status 404 returned error can't find the container with id a35f5ff3e23ed27ca0942fad8196f0749c1b266f2bdb094b1491ccb4ea5b2246
Sep 29 10:26:49 crc kubenswrapper[4779]: I0929 10:26:49.975681 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"65057b07-2357-4d32-bd20-dee421ca5041","Type":"ContainerStarted","Data":"a35f5ff3e23ed27ca0942fad8196f0749c1b266f2bdb094b1491ccb4ea5b2246"}
Sep 29 10:26:50 crc kubenswrapper[4779]: I0929 10:26:50.728430 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c5a9de7-c943-4654-bb1a-087fd1bb739e" path="/var/lib/kubelet/pods/1c5a9de7-c943-4654-bb1a-087fd1bb739e/volumes"
Sep 29 10:26:50 crc kubenswrapper[4779]: I0929 10:26:50.730466 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="591ac1c1-65cb-4471-a9ee-8e4d17e9d395" path="/var/lib/kubelet/pods/591ac1c1-65cb-4471-a9ee-8e4d17e9d395/volumes"
Sep 29 10:26:51 crc kubenswrapper[4779]: I0929 10:26:51.001390 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"860d5151671128f235100da6f6666ce427faedc693e288aca6ad696cc5d85668"}
Sep 29 10:26:55 crc kubenswrapper[4779]: I0929 10:26:55.043534 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"65057b07-2357-4d32-bd20-dee421ca5041","Type":"ContainerStarted","Data":"cf59874883e98a706abfa493b46329348e37bab7af6a1530b933d27a5f60ca4f"}
Sep 29 10:27:03 crc kubenswrapper[4779]: I0929 10:27:03.126398 4779 generic.go:334] "Generic (PLEG): container finished" podID="65057b07-2357-4d32-bd20-dee421ca5041" containerID="cf59874883e98a706abfa493b46329348e37bab7af6a1530b933d27a5f60ca4f" exitCode=0
Sep 29 10:27:03 crc kubenswrapper[4779]: I0929 10:27:03.126875 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"65057b07-2357-4d32-bd20-dee421ca5041","Type":"ContainerDied","Data":"cf59874883e98a706abfa493b46329348e37bab7af6a1530b933d27a5f60ca4f"}
Sep 29 10:27:04 crc kubenswrapper[4779]: I0929 10:27:04.138359 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"65057b07-2357-4d32-bd20-dee421ca5041","Type":"ContainerStarted","Data":"0d8c351d66cfbdac4bb0f26dacb11d39803868192e61e7d186b3cec7781e2588"}
Sep 29 10:27:08 crc kubenswrapper[4779]: I0929 10:27:08.183617 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"65057b07-2357-4d32-bd20-dee421ca5041","Type":"ContainerStarted","Data":"87024e667ad3cc0c3b70944a0560c5fbe3a236b3d89775bf4e3edf4d9cb693f9"}
Sep 29 10:27:08 crc kubenswrapper[4779]: I0929 10:27:08.184162 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"65057b07-2357-4d32-bd20-dee421ca5041","Type":"ContainerStarted","Data":"33f5ba0667a3304da88266dc040e11ed56dd2e77f640bc7423dc140b43b77821"}
Sep 29 10:27:08 crc kubenswrapper[4779]: I0929 10:27:08.232303 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=19.232275921 podStartE2EDuration="19.232275921s" podCreationTimestamp="2025-09-29 10:26:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 10:27:08.214218701 +0000 UTC m=+3460.195542625" watchObservedRunningTime="2025-09-29 10:27:08.232275921 +0000 UTC m=+3460.213599845"
Sep 29 10:27:09 crc kubenswrapper[4779]: I0929 10:27:09.448019 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Sep 29 10:27:19 crc kubenswrapper[4779]: I0929 10:27:19.448056 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Sep 29 10:27:19 crc kubenswrapper[4779]: I0929 10:27:19.457497 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Sep 29 10:27:20 crc kubenswrapper[4779]: I0929 10:27:20.327989 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.240864 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.243629 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.246002 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-xdtbp"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.246041 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.246575 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.248793 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.257068 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.319540 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.319632 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.319758 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-config-data\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.319810 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.319843 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.319991 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.320184 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.320230 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmmss\" (UniqueName: \"kubernetes.io/projected/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-kube-api-access-nmmss\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.320294 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.422475 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-config-data\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.422610 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.422637 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.422727 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.422874 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.423034 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmmss\" (UniqueName: \"kubernetes.io/projected/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-kube-api-access-nmmss\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.423116 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.423147 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.423188 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.423318 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.424135 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.426644 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-config-data\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.426654 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.426684 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.431249 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.439531 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.444723 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.459745 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmmss\" (UniqueName: \"kubernetes.io/projected/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-kube-api-access-nmmss\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.482158 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " pod="openstack/tempest-tests-tempest"
Sep 29 10:27:38 crc kubenswrapper[4779]: I0929 10:27:38.614200 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Sep 29 10:27:39 crc kubenswrapper[4779]: I0929 10:27:39.108207 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Sep 29 10:27:39 crc kubenswrapper[4779]: I0929 10:27:39.117159 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 10:27:39 crc kubenswrapper[4779]: I0929 10:27:39.522353 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624","Type":"ContainerStarted","Data":"79d3fb142b9a90b0c99327a849b21736440f12d54585ec842382679afdafb5c9"}
Sep 29 10:27:50 crc kubenswrapper[4779]: I0929 10:27:50.655627 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624","Type":"ContainerStarted","Data":"99bf52254012cebec2ddd98a0b3e64db862af78c9f09b3598ee9e71b47c9e1a4"}
Sep 29 10:27:50 crc kubenswrapper[4779]: I0929 10:27:50.679248 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.278630848 podStartE2EDuration="13.679231085s" podCreationTimestamp="2025-09-29 10:27:37 +0000 UTC" firstStartedPulling="2025-09-29 10:27:39.11694907 +0000 UTC m=+3491.098272974" lastFinishedPulling="2025-09-29 10:27:49.517549307 +0000 UTC m=+3501.498873211" observedRunningTime="2025-09-29 10:27:50.674557611 +0000 UTC m=+3502.655881525" watchObservedRunningTime="2025-09-29 10:27:50.679231085 +0000 UTC m=+3502.660554989"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.290949 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jdzkr"]
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.293873 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.304244 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdzkr"]
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.406691 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwfnc\" (UniqueName: \"kubernetes.io/projected/cf137010-52e8-4e7a-893a-8916103476cd-kube-api-access-hwfnc\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.406772 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-utilities\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.407067 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-catalog-content\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.508935 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwfnc\" (UniqueName: \"kubernetes.io/projected/cf137010-52e8-4e7a-893a-8916103476cd-kube-api-access-hwfnc\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.509020 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-utilities\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.509117 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-catalog-content\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.509704 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-utilities\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.509749 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-catalog-content\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.540460 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwfnc\" (UniqueName: \"kubernetes.io/projected/cf137010-52e8-4e7a-893a-8916103476cd-kube-api-access-hwfnc\") pod \"redhat-marketplace-jdzkr\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") " pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:29 crc kubenswrapper[4779]: I0929 10:28:29.620934 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:30 crc kubenswrapper[4779]: I0929 10:28:30.104624 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdzkr"]
Sep 29 10:28:31 crc kubenswrapper[4779]: I0929 10:28:31.097892 4779 generic.go:334] "Generic (PLEG): container finished" podID="cf137010-52e8-4e7a-893a-8916103476cd" containerID="e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc" exitCode=0
Sep 29 10:28:31 crc kubenswrapper[4779]: I0929 10:28:31.097990 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdzkr" event={"ID":"cf137010-52e8-4e7a-893a-8916103476cd","Type":"ContainerDied","Data":"e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc"}
Sep 29 10:28:31 crc kubenswrapper[4779]: I0929 10:28:31.098203 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdzkr" event={"ID":"cf137010-52e8-4e7a-893a-8916103476cd","Type":"ContainerStarted","Data":"898789b50365a68dd07e7cce17ce2ef142dcbed780a538430298e5cb6ed41224"}
Sep 29 10:28:32 crc kubenswrapper[4779]: I0929 10:28:32.110190 4779 generic.go:334] "Generic (PLEG): container finished" podID="cf137010-52e8-4e7a-893a-8916103476cd" containerID="aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6" exitCode=0
Sep 29 10:28:32 crc kubenswrapper[4779]: I0929 10:28:32.110405 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdzkr" event={"ID":"cf137010-52e8-4e7a-893a-8916103476cd","Type":"ContainerDied","Data":"aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6"}
Sep 29 10:28:33 crc kubenswrapper[4779]: I0929 10:28:33.122605 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdzkr" event={"ID":"cf137010-52e8-4e7a-893a-8916103476cd","Type":"ContainerStarted","Data":"739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43"}
Sep 29 10:28:33 crc kubenswrapper[4779]: I0929 10:28:33.143593 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jdzkr" podStartSLOduration=2.673612584 podStartE2EDuration="4.14357283s" podCreationTimestamp="2025-09-29 10:28:29 +0000 UTC" firstStartedPulling="2025-09-29 10:28:31.09986981 +0000 UTC m=+3543.081193714" lastFinishedPulling="2025-09-29 10:28:32.569830056 +0000 UTC m=+3544.551153960" observedRunningTime="2025-09-29 10:28:33.142724446 +0000 UTC m=+3545.124048350" watchObservedRunningTime="2025-09-29 10:28:33.14357283 +0000 UTC m=+3545.124896734"
Sep 29 10:28:39 crc kubenswrapper[4779]: I0929 10:28:39.621746 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:39 crc kubenswrapper[4779]: I0929 10:28:39.622659 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:39 crc kubenswrapper[4779]: I0929 10:28:39.673137 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:40 crc kubenswrapper[4779]: I0929 10:28:40.241626 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:40 crc kubenswrapper[4779]: I0929 10:28:40.290354 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdzkr"]
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.207119 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jdzkr" podUID="cf137010-52e8-4e7a-893a-8916103476cd" containerName="registry-server" containerID="cri-o://739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43" gracePeriod=2
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.682430 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.851584 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-catalog-content\") pod \"cf137010-52e8-4e7a-893a-8916103476cd\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") "
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.851661 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-utilities\") pod \"cf137010-52e8-4e7a-893a-8916103476cd\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") "
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.851780 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwfnc\" (UniqueName: \"kubernetes.io/projected/cf137010-52e8-4e7a-893a-8916103476cd-kube-api-access-hwfnc\") pod \"cf137010-52e8-4e7a-893a-8916103476cd\" (UID: \"cf137010-52e8-4e7a-893a-8916103476cd\") "
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.853701 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-utilities" (OuterVolumeSpecName: "utilities") pod "cf137010-52e8-4e7a-893a-8916103476cd" (UID: "cf137010-52e8-4e7a-893a-8916103476cd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.861681 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf137010-52e8-4e7a-893a-8916103476cd-kube-api-access-hwfnc" (OuterVolumeSpecName: "kube-api-access-hwfnc") pod "cf137010-52e8-4e7a-893a-8916103476cd" (UID: "cf137010-52e8-4e7a-893a-8916103476cd"). InnerVolumeSpecName "kube-api-access-hwfnc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.892720 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf137010-52e8-4e7a-893a-8916103476cd" (UID: "cf137010-52e8-4e7a-893a-8916103476cd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.956423 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.956490 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf137010-52e8-4e7a-893a-8916103476cd-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 10:28:42 crc kubenswrapper[4779]: I0929 10:28:42.956515 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwfnc\" (UniqueName: \"kubernetes.io/projected/cf137010-52e8-4e7a-893a-8916103476cd-kube-api-access-hwfnc\") on node \"crc\" DevicePath \"\""
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.224157 4779 generic.go:334] "Generic (PLEG): container finished" podID="cf137010-52e8-4e7a-893a-8916103476cd" containerID="739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43" exitCode=0
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.224288 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jdzkr"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.224276 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdzkr" event={"ID":"cf137010-52e8-4e7a-893a-8916103476cd","Type":"ContainerDied","Data":"739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43"}
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.225048 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jdzkr" event={"ID":"cf137010-52e8-4e7a-893a-8916103476cd","Type":"ContainerDied","Data":"898789b50365a68dd07e7cce17ce2ef142dcbed780a538430298e5cb6ed41224"}
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.225075 4779 scope.go:117] "RemoveContainer" containerID="739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.276456 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdzkr"]
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.285884 4779 scope.go:117] "RemoveContainer" containerID="aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.287679 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jdzkr"]
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.316418 4779 scope.go:117] "RemoveContainer" containerID="e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.362867 4779 scope.go:117] "RemoveContainer" containerID="739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43"
Sep 29 10:28:43 crc kubenswrapper[4779]: E0929 10:28:43.363471 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43\": container with ID starting with 739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43 not found: ID does not exist" containerID="739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.363613 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43"} err="failed to get container status \"739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43\": rpc error: code = NotFound desc = could not find container \"739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43\": container with ID starting with 739a197e37c84941610518d40d61d35e88b30ba7358a97e7ed68d99c89fa6c43 not found: ID does not exist"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.363833 4779 scope.go:117] "RemoveContainer" containerID="aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6"
Sep 29 10:28:43 crc kubenswrapper[4779]: E0929 10:28:43.364294 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6\": container with ID starting with aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6 not found: ID does not exist" containerID="aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.364444 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6"} err="failed to get container status \"aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6\": rpc error: code = NotFound desc = could not find container \"aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6\": container with ID starting with aa4109fbfa8dc5f62c0c643cc617a23f99f43156dde8daba87f2e5aad3e243b6 not found: ID does not exist"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.364568 4779 scope.go:117] "RemoveContainer" containerID="e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc"
Sep 29 10:28:43 crc kubenswrapper[4779]: E0929 10:28:43.365306 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc\": container with ID starting with e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc not found: ID does not exist" containerID="e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc"
Sep 29 10:28:43 crc kubenswrapper[4779]: I0929 10:28:43.365350 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc"} err="failed to get container status \"e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc\": rpc error: code = NotFound desc = could not find container \"e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc\": container with ID starting with e4962ef12c166ae424c74ed9f9bd508df49cae8dfaf3172eff84713dc1b5b4cc not found: ID does not exist"
Sep 29 10:28:44 crc kubenswrapper[4779]: I0929 10:28:44.725247 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf137010-52e8-4e7a-893a-8916103476cd" path="/var/lib/kubelet/pods/cf137010-52e8-4e7a-893a-8916103476cd/volumes"
Sep 29 10:29:16 crc kubenswrapper[4779]: I0929 10:29:16.966805 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 10:29:16 crc kubenswrapper[4779]: I0929 10:29:16.967523 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 10:29:46 crc kubenswrapper[4779]: I0929 10:29:46.966407 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 10:29:46 crc kubenswrapper[4779]: I0929 10:29:46.967181 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.176585 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"]
Sep 29 10:30:00 crc kubenswrapper[4779]: E0929 10:30:00.178290 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf137010-52e8-4e7a-893a-8916103476cd" containerName="extract-utilities"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.178312 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf137010-52e8-4e7a-893a-8916103476cd" containerName="extract-utilities"
Sep 29 10:30:00 crc kubenswrapper[4779]: E0929 10:30:00.178347 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf137010-52e8-4e7a-893a-8916103476cd" containerName="extract-content"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.178450 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf137010-52e8-4e7a-893a-8916103476cd" containerName="extract-content"
Sep 29 10:30:00 crc kubenswrapper[4779]: E0929 10:30:00.178464 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf137010-52e8-4e7a-893a-8916103476cd" containerName="registry-server"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.178473 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf137010-52e8-4e7a-893a-8916103476cd" containerName="registry-server"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.178798 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf137010-52e8-4e7a-893a-8916103476cd" containerName="registry-server"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.179980 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.182200 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.182396 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.189723 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"]
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.305896 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-config-volume\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.306374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-secret-volume\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.306669 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs6sf\" (UniqueName: \"kubernetes.io/projected/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-kube-api-access-cs6sf\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.409231 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs6sf\" (UniqueName: \"kubernetes.io/projected/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-kube-api-access-cs6sf\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.409336 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-config-volume\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.409381 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-secret-volume\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.410685 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-config-volume\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.416843 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-secret-volume\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.429203 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs6sf\" (UniqueName: \"kubernetes.io/projected/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-kube-api-access-cs6sf\") pod \"collect-profiles-29319030-krpvl\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.509231 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:00 crc kubenswrapper[4779]: I0929 10:30:00.985810 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"]
Sep 29 10:30:01 crc kubenswrapper[4779]: I0929 10:30:01.023032 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl" event={"ID":"fcca22c5-a7c4-4ed8-83e9-a9805d21b170","Type":"ContainerStarted","Data":"1ded2fd712a688e0b469e95296817d9f0905808e9911d0fbfb8d11dd74b9c680"}
Sep 29 10:30:02 crc kubenswrapper[4779]: I0929 10:30:02.036569 4779 generic.go:334] "Generic (PLEG): container finished" podID="fcca22c5-a7c4-4ed8-83e9-a9805d21b170" containerID="00bd2d70cd8e7fa96b362852fef8b45251c23cacfaf7e75668000639ecd06a06" exitCode=0
Sep 29 10:30:02 crc kubenswrapper[4779]: I0929 10:30:02.036667 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl" event={"ID":"fcca22c5-a7c4-4ed8-83e9-a9805d21b170","Type":"ContainerDied","Data":"00bd2d70cd8e7fa96b362852fef8b45251c23cacfaf7e75668000639ecd06a06"}
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.437504 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.582299 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-config-volume\") pod \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") "
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.582439 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs6sf\" (UniqueName: \"kubernetes.io/projected/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-kube-api-access-cs6sf\") pod \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") "
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.582568 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-secret-volume\") pod \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\" (UID: \"fcca22c5-a7c4-4ed8-83e9-a9805d21b170\") "
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.583045 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-config-volume" (OuterVolumeSpecName: "config-volume") pod "fcca22c5-a7c4-4ed8-83e9-a9805d21b170" (UID: "fcca22c5-a7c4-4ed8-83e9-a9805d21b170"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.583306 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.593240 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fcca22c5-a7c4-4ed8-83e9-a9805d21b170" (UID: "fcca22c5-a7c4-4ed8-83e9-a9805d21b170"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.594368 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-kube-api-access-cs6sf" (OuterVolumeSpecName: "kube-api-access-cs6sf") pod "fcca22c5-a7c4-4ed8-83e9-a9805d21b170" (UID: "fcca22c5-a7c4-4ed8-83e9-a9805d21b170"). InnerVolumeSpecName "kube-api-access-cs6sf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.685522 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 10:30:03 crc kubenswrapper[4779]: I0929 10:30:03.685569 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs6sf\" (UniqueName: \"kubernetes.io/projected/fcca22c5-a7c4-4ed8-83e9-a9805d21b170-kube-api-access-cs6sf\") on node \"crc\" DevicePath \"\""
Sep 29 10:30:04 crc kubenswrapper[4779]: I0929 10:30:04.067600 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl" event={"ID":"fcca22c5-a7c4-4ed8-83e9-a9805d21b170","Type":"ContainerDied","Data":"1ded2fd712a688e0b469e95296817d9f0905808e9911d0fbfb8d11dd74b9c680"}
Sep 29 10:30:04 crc kubenswrapper[4779]: I0929 10:30:04.067977 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ded2fd712a688e0b469e95296817d9f0905808e9911d0fbfb8d11dd74b9c680"
Sep 29 10:30:04 crc kubenswrapper[4779]: I0929 10:30:04.068038 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"
Sep 29 10:30:04 crc kubenswrapper[4779]: I0929 10:30:04.525535 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg"]
Sep 29 10:30:04 crc kubenswrapper[4779]: I0929 10:30:04.534453 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29318985-gbvrg"]
Sep 29 10:30:04 crc kubenswrapper[4779]: I0929 10:30:04.725960 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d40756b7-84bf-4295-a020-07fbd8ffa388" path="/var/lib/kubelet/pods/d40756b7-84bf-4295-a020-07fbd8ffa388/volumes"
Sep 29 10:30:16 crc kubenswrapper[4779]: I0929 10:30:16.966743 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 10:30:16 crc kubenswrapper[4779]: I0929 10:30:16.967457 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 10:30:16 crc kubenswrapper[4779]: I0929 10:30:16.967522 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv"
Sep 29 10:30:16 crc kubenswrapper[4779]: I0929 10:30:16.972340 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"860d5151671128f235100da6f6666ce427faedc693e288aca6ad696cc5d85668"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 10:30:16 crc kubenswrapper[4779]: I0929 10:30:16.972477 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://860d5151671128f235100da6f6666ce427faedc693e288aca6ad696cc5d85668" gracePeriod=600
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://860d5151671128f235100da6f6666ce427faedc693e288aca6ad696cc5d85668" gracePeriod=600 Sep 29 10:30:17 crc kubenswrapper[4779]: I0929 10:30:17.198254 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="860d5151671128f235100da6f6666ce427faedc693e288aca6ad696cc5d85668" exitCode=0 Sep 29 10:30:17 crc kubenswrapper[4779]: I0929 10:30:17.198607 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"860d5151671128f235100da6f6666ce427faedc693e288aca6ad696cc5d85668"} Sep 29 10:30:17 crc kubenswrapper[4779]: I0929 10:30:17.198645 4779 scope.go:117] "RemoveContainer" containerID="a993e5a0edbe6448eaf61b4b45ffde78e59ae455e408d7dd1afa5a0ff12dc338" Sep 29 10:30:18 crc kubenswrapper[4779]: I0929 10:30:18.208836 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401"} Sep 29 10:30:49 crc kubenswrapper[4779]: I0929 10:30:49.492327 4779 scope.go:117] "RemoveContainer" containerID="5b2fdc0671308293ca34f8786edd66bead5898c3c362ea89cd1528aa4121af3a" Sep 29 10:31:49 crc kubenswrapper[4779]: I0929 10:31:49.589286 4779 scope.go:117] "RemoveContainer" containerID="dc565f836847f313cd70faf3481411110cc545e71b90759d4ed39b4c30d06c8f" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.086342 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dhmj2"] Sep 29 10:31:56 crc kubenswrapper[4779]: E0929 10:31:56.087388 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcca22c5-a7c4-4ed8-83e9-a9805d21b170" containerName="collect-profiles" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.087407 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcca22c5-a7c4-4ed8-83e9-a9805d21b170" containerName="collect-profiles" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.087640 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcca22c5-a7c4-4ed8-83e9-a9805d21b170" containerName="collect-profiles" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.089383 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.105495 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dhmj2"] Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.160323 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-catalog-content\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.160497 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-utilities\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.160610 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twj86\" (UniqueName: \"kubernetes.io/projected/a954a8de-2cdb-4648-a9a1-c21724ec103d-kube-api-access-twj86\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.262483 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twj86\" (UniqueName: \"kubernetes.io/projected/a954a8de-2cdb-4648-a9a1-c21724ec103d-kube-api-access-twj86\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.262626 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-catalog-content\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.262742 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-utilities\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.263279 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-utilities\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.263272 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-catalog-content\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.291357 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-twj86\" (UniqueName: \"kubernetes.io/projected/a954a8de-2cdb-4648-a9a1-c21724ec103d-kube-api-access-twj86\") pod \"redhat-operators-dhmj2\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:56 crc kubenswrapper[4779]: I0929 10:31:56.415422 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:31:57 crc kubenswrapper[4779]: I0929 10:31:57.059352 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dhmj2"] Sep 29 10:31:57 crc kubenswrapper[4779]: I0929 10:31:57.275810 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhmj2" event={"ID":"a954a8de-2cdb-4648-a9a1-c21724ec103d","Type":"ContainerStarted","Data":"429ae288a3b5c42fe0764637854255d1bbf595de15e35633541bc30c094d84f0"} Sep 29 10:31:58 crc kubenswrapper[4779]: I0929 10:31:58.292524 4779 generic.go:334] "Generic (PLEG): container finished" podID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerID="489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16" exitCode=0 Sep 29 10:31:58 crc kubenswrapper[4779]: I0929 10:31:58.292622 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhmj2" event={"ID":"a954a8de-2cdb-4648-a9a1-c21724ec103d","Type":"ContainerDied","Data":"489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16"} Sep 29 10:31:59 crc kubenswrapper[4779]: I0929 10:31:59.308299 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhmj2" event={"ID":"a954a8de-2cdb-4648-a9a1-c21724ec103d","Type":"ContainerStarted","Data":"4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d"} Sep 29 10:32:01 crc kubenswrapper[4779]: I0929 10:32:01.339593 4779 generic.go:334] "Generic (PLEG): container finished" podID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerID="4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d" exitCode=0 Sep 29 10:32:01 crc kubenswrapper[4779]: I0929 10:32:01.339772 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhmj2" event={"ID":"a954a8de-2cdb-4648-a9a1-c21724ec103d","Type":"ContainerDied","Data":"4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d"} Sep 29 10:32:02 crc kubenswrapper[4779]: I0929 10:32:02.351366 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhmj2" event={"ID":"a954a8de-2cdb-4648-a9a1-c21724ec103d","Type":"ContainerStarted","Data":"00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78"} Sep 29 10:32:02 crc kubenswrapper[4779]: I0929 10:32:02.376041 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dhmj2" podStartSLOduration=2.915852031 podStartE2EDuration="6.376012838s" podCreationTimestamp="2025-09-29 10:31:56 +0000 UTC" firstStartedPulling="2025-09-29 10:31:58.29569933 +0000 UTC m=+3750.277023234" lastFinishedPulling="2025-09-29 10:32:01.755860087 +0000 UTC m=+3753.737184041" observedRunningTime="2025-09-29 10:32:02.369372527 +0000 UTC m=+3754.350696431" watchObservedRunningTime="2025-09-29 10:32:02.376012838 +0000 UTC m=+3754.357336752" Sep 29 10:32:06 crc kubenswrapper[4779]: I0929 10:32:06.416197 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 
10:32:06 crc kubenswrapper[4779]: I0929 10:32:06.417075 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:32:06 crc kubenswrapper[4779]: I0929 10:32:06.556115 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:32:07 crc kubenswrapper[4779]: I0929 10:32:07.446734 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:32:08 crc kubenswrapper[4779]: I0929 10:32:08.476450 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dhmj2"] Sep 29 10:32:10 crc kubenswrapper[4779]: I0929 10:32:10.431015 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dhmj2" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerName="registry-server" containerID="cri-o://00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78" gracePeriod=2 Sep 29 10:32:10 crc kubenswrapper[4779]: I0929 10:32:10.957722 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.087466 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-utilities\") pod \"a954a8de-2cdb-4648-a9a1-c21724ec103d\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.087807 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twj86\" (UniqueName: \"kubernetes.io/projected/a954a8de-2cdb-4648-a9a1-c21724ec103d-kube-api-access-twj86\") pod \"a954a8de-2cdb-4648-a9a1-c21724ec103d\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.087911 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-catalog-content\") pod \"a954a8de-2cdb-4648-a9a1-c21724ec103d\" (UID: \"a954a8de-2cdb-4648-a9a1-c21724ec103d\") " Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.088644 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-utilities" (OuterVolumeSpecName: "utilities") pod "a954a8de-2cdb-4648-a9a1-c21724ec103d" (UID: "a954a8de-2cdb-4648-a9a1-c21724ec103d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.102815 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a954a8de-2cdb-4648-a9a1-c21724ec103d-kube-api-access-twj86" (OuterVolumeSpecName: "kube-api-access-twj86") pod "a954a8de-2cdb-4648-a9a1-c21724ec103d" (UID: "a954a8de-2cdb-4648-a9a1-c21724ec103d"). InnerVolumeSpecName "kube-api-access-twj86". 
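The pod_startup_latency_tracker entry above is easiest to read with the arithmetic written out: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that span minus the image-pull window. A minimal Go sketch of this relationship, using the timestamps from the entry (our reading of the fields, not kubelet's actual code; the logged SLO value differs in the last digits because kubelet subtracts using the monotonic m= offsets):

    package main

    import (
        "fmt"
        "time"
    )

    const layout = "2006-01-02 15:04:05.999999999 -0700 MST" // Go's default Time.String() form, as logged

    func mustParse(s string) time.Time {
        t, err := time.Parse(layout, s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        created := mustParse("2025-09-29 10:31:56 +0000 UTC")
        firstPull := mustParse("2025-09-29 10:31:58.29569933 +0000 UTC")
        lastPull := mustParse("2025-09-29 10:32:01.755860087 +0000 UTC")
        watched := mustParse("2025-09-29 10:32:02.376012838 +0000 UTC")

        e2e := watched.Sub(created)          // podStartE2EDuration: 6.376012838s
        slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: e2e minus pull time, ~2.9158s
        fmt.Println(e2e, slo)
    }
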
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.177315 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a954a8de-2cdb-4648-a9a1-c21724ec103d" (UID: "a954a8de-2cdb-4648-a9a1-c21724ec103d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.190260 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.190331 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twj86\" (UniqueName: \"kubernetes.io/projected/a954a8de-2cdb-4648-a9a1-c21724ec103d-kube-api-access-twj86\") on node \"crc\" DevicePath \"\"" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.190347 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a954a8de-2cdb-4648-a9a1-c21724ec103d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.446022 4779 generic.go:334] "Generic (PLEG): container finished" podID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerID="00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78" exitCode=0 Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.446061 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhmj2" event={"ID":"a954a8de-2cdb-4648-a9a1-c21724ec103d","Type":"ContainerDied","Data":"00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78"} Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.446107 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhmj2" event={"ID":"a954a8de-2cdb-4648-a9a1-c21724ec103d","Type":"ContainerDied","Data":"429ae288a3b5c42fe0764637854255d1bbf595de15e35633541bc30c094d84f0"} Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.446129 4779 scope.go:117] "RemoveContainer" containerID="00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.446938 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dhmj2" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.490232 4779 scope.go:117] "RemoveContainer" containerID="4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.498720 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dhmj2"] Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.507301 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dhmj2"] Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.528958 4779 scope.go:117] "RemoveContainer" containerID="489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.563622 4779 scope.go:117] "RemoveContainer" containerID="00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78" Sep 29 10:32:11 crc kubenswrapper[4779]: E0929 10:32:11.564193 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78\": container with ID starting with 00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78 not found: ID does not exist" containerID="00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.564230 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78"} err="failed to get container status \"00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78\": rpc error: code = NotFound desc = could not find container \"00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78\": container with ID starting with 00773bf0f7f6007f1e9466634d5bb2da86a88b6125b8593177e92721a510ce78 not found: ID does not exist" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.564253 4779 scope.go:117] "RemoveContainer" containerID="4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d" Sep 29 10:32:11 crc kubenswrapper[4779]: E0929 10:32:11.564586 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d\": container with ID starting with 4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d not found: ID does not exist" containerID="4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.564614 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d"} err="failed to get container status \"4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d\": rpc error: code = NotFound desc = could not find container \"4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d\": container with ID starting with 4cb4de7d59f00952d5bfddcec3497d761e9c7a674040dd9b5b6e2467c7dda50d not found: ID does not exist" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.564631 4779 scope.go:117] "RemoveContainer" containerID="489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16" Sep 29 10:32:11 crc kubenswrapper[4779]: E0929 10:32:11.564882 4779 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16\": container with ID starting with 489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16 not found: ID does not exist" containerID="489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16" Sep 29 10:32:11 crc kubenswrapper[4779]: I0929 10:32:11.564903 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16"} err="failed to get container status \"489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16\": rpc error: code = NotFound desc = could not find container \"489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16\": container with ID starting with 489314d8b9508d7cdf89d472841a50125b7e9399b8c267c37bebc3090b73cd16 not found: ID does not exist" Sep 29 10:32:12 crc kubenswrapper[4779]: I0929 10:32:12.728008 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" path="/var/lib/kubelet/pods/a954a8de-2cdb-4648-a9a1-c21724ec103d/volumes" Sep 29 10:32:46 crc kubenswrapper[4779]: I0929 10:32:46.966233 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:32:46 crc kubenswrapper[4779]: I0929 10:32:46.966833 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:32:49 crc kubenswrapper[4779]: I0929 10:32:49.689821 4779 scope.go:117] "RemoveContainer" containerID="b966ac2916afda7757e87ae2c3995f3765c4f35b6d76fd55b83ebcd6af5b33ac" Sep 29 10:32:50 crc kubenswrapper[4779]: I0929 10:32:50.137825 4779 scope.go:117] "RemoveContainer" containerID="dc91e1fb322963176eee25b6bd990d318149a0d9db7b2bb750906d037dae2d81" Sep 29 10:32:50 crc kubenswrapper[4779]: I0929 10:32:50.186324 4779 scope.go:117] "RemoveContainer" containerID="20ad142282b3f2953305bcc96b6cd7fffe14a97cdd4de499abfb1fca9aded3d3" Sep 29 10:32:50 crc kubenswrapper[4779]: I0929 10:32:50.235757 4779 scope.go:117] "RemoveContainer" containerID="b2f388e3b42d31c98a040b9fec73f34e5254f450c56538390319c51048da6920" Sep 29 10:33:16 crc kubenswrapper[4779]: I0929 10:33:16.966331 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:33:16 crc kubenswrapper[4779]: I0929 10:33:16.966949 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:33:46 crc kubenswrapper[4779]: I0929 10:33:46.966608 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:33:46 crc kubenswrapper[4779]: I0929 10:33:46.967091 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:33:46 crc kubenswrapper[4779]: I0929 10:33:46.967137 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 10:33:46 crc kubenswrapper[4779]: I0929 10:33:46.967723 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 10:33:46 crc kubenswrapper[4779]: I0929 10:33:46.967782 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" gracePeriod=600 Sep 29 10:33:47 crc kubenswrapper[4779]: E0929 10:33:47.105683 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:33:47 crc kubenswrapper[4779]: I0929 10:33:47.436763 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" exitCode=0 Sep 29 10:33:47 crc kubenswrapper[4779]: I0929 10:33:47.436834 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401"} Sep 29 10:33:47 crc kubenswrapper[4779]: I0929 10:33:47.436922 4779 scope.go:117] "RemoveContainer" containerID="860d5151671128f235100da6f6666ce427faedc693e288aca6ad696cc5d85668" Sep 29 10:33:47 crc kubenswrapper[4779]: I0929 10:33:47.438182 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:33:47 crc kubenswrapper[4779]: E0929 10:33:47.439368 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" 
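The long run of CrashLoopBackOff entries that follows comes from kubelet's restart back-off: upstream kubelet's documented default starts at 10s and doubles per restart up to the 5m0s cap quoted in the error string. An illustrative Go sketch of that schedule (our own sketch of the documented behavior, not kubelet's actual code):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const (
            initial  = 10 * time.Second // kubelet's documented default initial back-off
            maxDelay = 5 * time.Minute  // the "back-off 5m0s" ceiling in the errors below
        )
        d := initial
        for restart := 1; restart <= 7; restart++ {
            fmt.Printf("restart %d: wait %v\n", restart, d)
            d *= 2
            if d > maxDelay {
                d = maxDelay // clamps at 5m0s: 10s, 20s, 40s, 1m20s, 2m40s, 5m0s, 5m0s
            }
        }
    }

Once the cap is reached, every sync attempt fails fast with the same "back-off 5m0s" message, which is why the entries below repeat roughly every 10 to 15 seconds without the container actually starting.
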
podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:34:01 crc kubenswrapper[4779]: I0929 10:34:01.715134 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:34:01 crc kubenswrapper[4779]: E0929 10:34:01.725187 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:34:14 crc kubenswrapper[4779]: I0929 10:34:14.715020 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:34:14 crc kubenswrapper[4779]: E0929 10:34:14.715891 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:34:26 crc kubenswrapper[4779]: I0929 10:34:26.716046 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:34:26 crc kubenswrapper[4779]: E0929 10:34:26.716803 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:34:40 crc kubenswrapper[4779]: I0929 10:34:40.715690 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:34:40 crc kubenswrapper[4779]: E0929 10:34:40.717104 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:34:53 crc kubenswrapper[4779]: I0929 10:34:53.714463 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:34:53 crc kubenswrapper[4779]: E0929 10:34:53.715341 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:35:04 crc kubenswrapper[4779]: I0929 10:35:04.714604 4779 scope.go:117] "RemoveContainer" 
containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:35:04 crc kubenswrapper[4779]: E0929 10:35:04.715688 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:35:15 crc kubenswrapper[4779]: I0929 10:35:15.714081 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:35:15 crc kubenswrapper[4779]: E0929 10:35:15.714749 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:35:26 crc kubenswrapper[4779]: I0929 10:35:26.734957 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:35:26 crc kubenswrapper[4779]: E0929 10:35:26.736280 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:35:37 crc kubenswrapper[4779]: I0929 10:35:37.716134 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:35:37 crc kubenswrapper[4779]: E0929 10:35:37.718617 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:35:49 crc kubenswrapper[4779]: I0929 10:35:49.714706 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:35:49 crc kubenswrapper[4779]: E0929 10:35:49.715460 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:36:03 crc kubenswrapper[4779]: I0929 10:36:03.714793 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:36:03 crc kubenswrapper[4779]: E0929 10:36:03.715617 4779 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:36:16 crc kubenswrapper[4779]: I0929 10:36:16.715155 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:36:16 crc kubenswrapper[4779]: E0929 10:36:16.716055 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:36:30 crc kubenswrapper[4779]: I0929 10:36:30.715012 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:36:30 crc kubenswrapper[4779]: E0929 10:36:30.715834 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:36:44 crc kubenswrapper[4779]: I0929 10:36:44.714060 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:36:44 crc kubenswrapper[4779]: E0929 10:36:44.714856 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:36:57 crc kubenswrapper[4779]: I0929 10:36:57.715054 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:36:57 crc kubenswrapper[4779]: E0929 10:36:57.716897 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:37:10 crc kubenswrapper[4779]: I0929 10:37:10.714736 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:37:10 crc kubenswrapper[4779]: E0929 10:37:10.715589 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:37:22 crc kubenswrapper[4779]: I0929 10:37:22.714456 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:37:22 crc kubenswrapper[4779]: E0929 10:37:22.716215 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.714320 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:37:36 crc kubenswrapper[4779]: E0929 10:37:36.715276 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.779560 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8g66s"] Sep 29 10:37:36 crc kubenswrapper[4779]: E0929 10:37:36.780378 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerName="extract-utilities" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.780488 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerName="extract-utilities" Sep 29 10:37:36 crc kubenswrapper[4779]: E0929 10:37:36.780575 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerName="extract-content" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.780645 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerName="extract-content" Sep 29 10:37:36 crc kubenswrapper[4779]: E0929 10:37:36.780729 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerName="registry-server" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.780782 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerName="registry-server" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.781064 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a954a8de-2cdb-4648-a9a1-c21724ec103d" containerName="registry-server" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.782768 4779 util.go:30] "No sandbox for pod can be found. 
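Runs like the one above are easier to audit mechanically than by eye. A throwaway Go sketch (the regex and names are ours, not part of kubelet) that splits the klog-style prefix of these lines into severity, timestamp, source, and message, e.g. to count back-off retries per pod:

    package main

    import (
        "fmt"
        "regexp"
    )

    // klogRe approximates the prefix used in these lines:
    //   I0929 10:34:01.715134 4779 scope.go:117] <message>
    var klogRe = regexp.MustCompile(`([IWE])(\d{4} \d{2}:\d{2}:\d{2}\.\d+)\s+\d+\s+([^\]\s]+)\]\s+(.*)`)

    func main() {
        line := `E0929 10:34:01.725187 4779 pod_workers.go:1301] "Error syncing pod, skipping"`
        if m := klogRe.FindStringSubmatch(line); m != nil {
            fmt.Printf("sev=%s ts=%s src=%s msg=%s\n", m[1], m[2], m[3], m[4])
        }
    }
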
Need to start a new one" pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.810110 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8g66s"] Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.941608 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-catalog-content\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.941815 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h9g8\" (UniqueName: \"kubernetes.io/projected/fe91baaa-d424-4e82-81ea-7ca078d037e5-kube-api-access-9h9g8\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:36 crc kubenswrapper[4779]: I0929 10:37:36.941877 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-utilities\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:37 crc kubenswrapper[4779]: I0929 10:37:37.043876 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-utilities\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:37 crc kubenswrapper[4779]: I0929 10:37:37.043980 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-catalog-content\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:37 crc kubenswrapper[4779]: I0929 10:37:37.044100 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h9g8\" (UniqueName: \"kubernetes.io/projected/fe91baaa-d424-4e82-81ea-7ca078d037e5-kube-api-access-9h9g8\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:37 crc kubenswrapper[4779]: I0929 10:37:37.044829 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-utilities\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:37 crc kubenswrapper[4779]: I0929 10:37:37.045023 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-catalog-content\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:37 crc kubenswrapper[4779]: I0929 10:37:37.098175 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9h9g8\" (UniqueName: \"kubernetes.io/projected/fe91baaa-d424-4e82-81ea-7ca078d037e5-kube-api-access-9h9g8\") pod \"community-operators-8g66s\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:37 crc kubenswrapper[4779]: I0929 10:37:37.115977 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:37 crc kubenswrapper[4779]: W0929 10:37:37.736892 4779 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe91baaa_d424_4e82_81ea_7ca078d037e5.slice/crio-dc4fa29ae551bc153a38b960ec768b5388f7c82438287fda8ae426996eea1ff9 WatchSource:0}: Error finding container dc4fa29ae551bc153a38b960ec768b5388f7c82438287fda8ae426996eea1ff9: Status 404 returned error can't find the container with id dc4fa29ae551bc153a38b960ec768b5388f7c82438287fda8ae426996eea1ff9 Sep 29 10:37:37 crc kubenswrapper[4779]: I0929 10:37:37.738569 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8g66s"] Sep 29 10:37:38 crc kubenswrapper[4779]: I0929 10:37:38.761764 4779 generic.go:334] "Generic (PLEG): container finished" podID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerID="c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538" exitCode=0 Sep 29 10:37:38 crc kubenswrapper[4779]: I0929 10:37:38.762213 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g66s" event={"ID":"fe91baaa-d424-4e82-81ea-7ca078d037e5","Type":"ContainerDied","Data":"c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538"} Sep 29 10:37:38 crc kubenswrapper[4779]: I0929 10:37:38.765753 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g66s" event={"ID":"fe91baaa-d424-4e82-81ea-7ca078d037e5","Type":"ContainerStarted","Data":"dc4fa29ae551bc153a38b960ec768b5388f7c82438287fda8ae426996eea1ff9"} Sep 29 10:37:38 crc kubenswrapper[4779]: I0929 10:37:38.768889 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 10:37:39 crc kubenswrapper[4779]: I0929 10:37:39.778456 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g66s" event={"ID":"fe91baaa-d424-4e82-81ea-7ca078d037e5","Type":"ContainerStarted","Data":"a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143"} Sep 29 10:37:40 crc kubenswrapper[4779]: I0929 10:37:40.792663 4779 generic.go:334] "Generic (PLEG): container finished" podID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerID="a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143" exitCode=0 Sep 29 10:37:40 crc kubenswrapper[4779]: I0929 10:37:40.792773 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g66s" event={"ID":"fe91baaa-d424-4e82-81ea-7ca078d037e5","Type":"ContainerDied","Data":"a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143"} Sep 29 10:37:42 crc kubenswrapper[4779]: I0929 10:37:42.826314 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g66s" event={"ID":"fe91baaa-d424-4e82-81ea-7ca078d037e5","Type":"ContainerStarted","Data":"b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b"} Sep 29 10:37:42 crc kubenswrapper[4779]: I0929 
10:37:42.866275 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8g66s" podStartSLOduration=3.816932816 podStartE2EDuration="6.866239058s" podCreationTimestamp="2025-09-29 10:37:36 +0000 UTC" firstStartedPulling="2025-09-29 10:37:38.767823171 +0000 UTC m=+4090.749147085" lastFinishedPulling="2025-09-29 10:37:41.817129413 +0000 UTC m=+4093.798453327" observedRunningTime="2025-09-29 10:37:42.848829562 +0000 UTC m=+4094.830153466" watchObservedRunningTime="2025-09-29 10:37:42.866239058 +0000 UTC m=+4094.847563002" Sep 29 10:37:47 crc kubenswrapper[4779]: I0929 10:37:47.117002 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:47 crc kubenswrapper[4779]: I0929 10:37:47.118682 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:47 crc kubenswrapper[4779]: I0929 10:37:47.170182 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:48 crc kubenswrapper[4779]: I0929 10:37:48.607487 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:48 crc kubenswrapper[4779]: I0929 10:37:48.658222 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8g66s"] Sep 29 10:37:49 crc kubenswrapper[4779]: I0929 10:37:49.895687 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8g66s" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerName="registry-server" containerID="cri-o://b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b" gracePeriod=2 Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.479328 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.539848 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h9g8\" (UniqueName: \"kubernetes.io/projected/fe91baaa-d424-4e82-81ea-7ca078d037e5-kube-api-access-9h9g8\") pod \"fe91baaa-d424-4e82-81ea-7ca078d037e5\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.539985 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-catalog-content\") pod \"fe91baaa-d424-4e82-81ea-7ca078d037e5\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.540240 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-utilities\") pod \"fe91baaa-d424-4e82-81ea-7ca078d037e5\" (UID: \"fe91baaa-d424-4e82-81ea-7ca078d037e5\") " Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.541547 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-utilities" (OuterVolumeSpecName: "utilities") pod "fe91baaa-d424-4e82-81ea-7ca078d037e5" (UID: "fe91baaa-d424-4e82-81ea-7ca078d037e5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.547030 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe91baaa-d424-4e82-81ea-7ca078d037e5-kube-api-access-9h9g8" (OuterVolumeSpecName: "kube-api-access-9h9g8") pod "fe91baaa-d424-4e82-81ea-7ca078d037e5" (UID: "fe91baaa-d424-4e82-81ea-7ca078d037e5"). InnerVolumeSpecName "kube-api-access-9h9g8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.619160 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe91baaa-d424-4e82-81ea-7ca078d037e5" (UID: "fe91baaa-d424-4e82-81ea-7ca078d037e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.642635 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.642684 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9h9g8\" (UniqueName: \"kubernetes.io/projected/fe91baaa-d424-4e82-81ea-7ca078d037e5-kube-api-access-9h9g8\") on node \"crc\" DevicePath \"\"" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.642699 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe91baaa-d424-4e82-81ea-7ca078d037e5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.904866 4779 generic.go:334] "Generic (PLEG): container finished" podID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerID="b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b" exitCode=0 Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.904972 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g66s" event={"ID":"fe91baaa-d424-4e82-81ea-7ca078d037e5","Type":"ContainerDied","Data":"b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b"} Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.905031 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g66s" event={"ID":"fe91baaa-d424-4e82-81ea-7ca078d037e5","Type":"ContainerDied","Data":"dc4fa29ae551bc153a38b960ec768b5388f7c82438287fda8ae426996eea1ff9"} Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.905037 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8g66s" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.905059 4779 scope.go:117] "RemoveContainer" containerID="b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.932978 4779 scope.go:117] "RemoveContainer" containerID="a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143" Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.944581 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8g66s"] Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.954562 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8g66s"] Sep 29 10:37:50 crc kubenswrapper[4779]: I0929 10:37:50.960084 4779 scope.go:117] "RemoveContainer" containerID="c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538" Sep 29 10:37:51 crc kubenswrapper[4779]: I0929 10:37:51.006508 4779 scope.go:117] "RemoveContainer" containerID="b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b" Sep 29 10:37:51 crc kubenswrapper[4779]: E0929 10:37:51.006885 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b\": container with ID starting with b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b not found: ID does not exist" containerID="b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b" Sep 29 10:37:51 crc kubenswrapper[4779]: I0929 10:37:51.006957 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b"} err="failed to get container status \"b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b\": rpc error: code = NotFound desc = could not find container \"b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b\": container with ID starting with b7ef81b5f0271f46e06f72d5ab5a1e6e3e40e2144bf58641e0cebb92dcd4e13b not found: ID does not exist" Sep 29 10:37:51 crc kubenswrapper[4779]: I0929 10:37:51.006991 4779 scope.go:117] "RemoveContainer" containerID="a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143" Sep 29 10:37:51 crc kubenswrapper[4779]: E0929 10:37:51.007333 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143\": container with ID starting with a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143 not found: ID does not exist" containerID="a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143" Sep 29 10:37:51 crc kubenswrapper[4779]: I0929 10:37:51.007377 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143"} err="failed to get container status \"a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143\": rpc error: code = NotFound desc = could not find container \"a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143\": container with ID starting with a55fb56d45b5c43f8e7b7972e8043dfaba907de350366ec92666d27de25d9143 not found: ID does not exist" Sep 29 10:37:51 crc kubenswrapper[4779]: I0929 10:37:51.007406 4779 scope.go:117] "RemoveContainer" 
containerID="c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538" Sep 29 10:37:51 crc kubenswrapper[4779]: E0929 10:37:51.007767 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538\": container with ID starting with c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538 not found: ID does not exist" containerID="c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538" Sep 29 10:37:51 crc kubenswrapper[4779]: I0929 10:37:51.007811 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538"} err="failed to get container status \"c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538\": rpc error: code = NotFound desc = could not find container \"c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538\": container with ID starting with c76d3178c45f388880c91c0de0cbe4984785bb4fd5be9e0087f4574c6a282538 not found: ID does not exist" Sep 29 10:37:51 crc kubenswrapper[4779]: I0929 10:37:51.715039 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:37:51 crc kubenswrapper[4779]: E0929 10:37:51.715452 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:37:52 crc kubenswrapper[4779]: I0929 10:37:52.729480 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" path="/var/lib/kubelet/pods/fe91baaa-d424-4e82-81ea-7ca078d037e5/volumes" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.714487 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401" Sep 29 10:38:02 crc kubenswrapper[4779]: E0929 10:38:02.715630 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.876591 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6lvhv"] Sep 29 10:38:02 crc kubenswrapper[4779]: E0929 10:38:02.877799 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerName="extract-content" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.877832 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerName="extract-content" Sep 29 10:38:02 crc kubenswrapper[4779]: E0929 10:38:02.877860 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerName="extract-utilities" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 
10:38:02.877870 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerName="extract-utilities" Sep 29 10:38:02 crc kubenswrapper[4779]: E0929 10:38:02.877927 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerName="registry-server" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.877936 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerName="registry-server" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.878212 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe91baaa-d424-4e82-81ea-7ca078d037e5" containerName="registry-server" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.879938 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6lvhv" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.892400 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6lvhv"] Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.945770 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-utilities\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.945886 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj65j\" (UniqueName: \"kubernetes.io/projected/170aabed-50a2-458e-9375-774a5be906bd-kube-api-access-nj65j\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv" Sep 29 10:38:02 crc kubenswrapper[4779]: I0929 10:38:02.946384 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-catalog-content\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv" Sep 29 10:38:03 crc kubenswrapper[4779]: I0929 10:38:03.049840 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-utilities\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv" Sep 29 10:38:03 crc kubenswrapper[4779]: I0929 10:38:03.050014 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj65j\" (UniqueName: \"kubernetes.io/projected/170aabed-50a2-458e-9375-774a5be906bd-kube-api-access-nj65j\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv" Sep 29 10:38:03 crc kubenswrapper[4779]: I0929 10:38:03.050132 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-catalog-content\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv" 
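The "ContainerStatus ... NotFound" errors above are a benign race: by the time the second RemoveContainer runs, CRI-O has already deleted the container, so the status lookup fails and kubelet just logs it and moves on. Callers that want idempotent deletes commonly treat gRPC NotFound as success; a hedged Go sketch of that pattern (our own helper, not kubelet's actual code):

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer treats a NotFound from the runtime as success, since the
    // desired end state (container gone) already holds.
    func removeContainer(id string, remove func(string) error) error {
        err := remove(id)
        if c := status.Code(err); c == codes.OK || c == codes.NotFound {
            return nil
        }
        return err
    }

    func main() {
        alreadyGone := func(id string) error {
            return status.Error(codes.NotFound, "could not find container "+id)
        }
        fmt.Println(removeContainer("00773bf0", alreadyGone)) // <nil>
    }
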
Sep 29 10:38:03 crc kubenswrapper[4779]: I0929 10:38:03.050719 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-utilities\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:03 crc kubenswrapper[4779]: I0929 10:38:03.050789 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-catalog-content\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:03 crc kubenswrapper[4779]: I0929 10:38:03.071622 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nj65j\" (UniqueName: \"kubernetes.io/projected/170aabed-50a2-458e-9375-774a5be906bd-kube-api-access-nj65j\") pod \"certified-operators-6lvhv\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") " pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:03 crc kubenswrapper[4779]: I0929 10:38:03.208474 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:03 crc kubenswrapper[4779]: I0929 10:38:03.788693 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6lvhv"]
Sep 29 10:38:04 crc kubenswrapper[4779]: I0929 10:38:04.057712 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lvhv" event={"ID":"170aabed-50a2-458e-9375-774a5be906bd","Type":"ContainerStarted","Data":"6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64"}
Sep 29 10:38:04 crc kubenswrapper[4779]: I0929 10:38:04.058165 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lvhv" event={"ID":"170aabed-50a2-458e-9375-774a5be906bd","Type":"ContainerStarted","Data":"c27f425241a19aeb530bf27fba89bf852d7c618dd857d73363100f973745df10"}
Sep 29 10:38:05 crc kubenswrapper[4779]: I0929 10:38:05.068891 4779 generic.go:334] "Generic (PLEG): container finished" podID="170aabed-50a2-458e-9375-774a5be906bd" containerID="6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64" exitCode=0
Sep 29 10:38:05 crc kubenswrapper[4779]: I0929 10:38:05.068952 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lvhv" event={"ID":"170aabed-50a2-458e-9375-774a5be906bd","Type":"ContainerDied","Data":"6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64"}
Sep 29 10:38:06 crc kubenswrapper[4779]: I0929 10:38:06.080133 4779 generic.go:334] "Generic (PLEG): container finished" podID="170aabed-50a2-458e-9375-774a5be906bd" containerID="77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024" exitCode=0
Sep 29 10:38:06 crc kubenswrapper[4779]: I0929 10:38:06.080193 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lvhv" event={"ID":"170aabed-50a2-458e-9375-774a5be906bd","Type":"ContainerDied","Data":"77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024"}
Sep 29 10:38:07 crc kubenswrapper[4779]: I0929 10:38:07.092760 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lvhv" event={"ID":"170aabed-50a2-458e-9375-774a5be906bd","Type":"ContainerStarted","Data":"aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57"}
Sep 29 10:38:07 crc kubenswrapper[4779]: I0929 10:38:07.120237 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6lvhv" podStartSLOduration=2.716341072 podStartE2EDuration="5.120213551s" podCreationTimestamp="2025-09-29 10:38:02 +0000 UTC" firstStartedPulling="2025-09-29 10:38:04.059790446 +0000 UTC m=+4116.041114350" lastFinishedPulling="2025-09-29 10:38:06.463662905 +0000 UTC m=+4118.444986829" observedRunningTime="2025-09-29 10:38:07.114020191 +0000 UTC m=+4119.095344095" watchObservedRunningTime="2025-09-29 10:38:07.120213551 +0000 UTC m=+4119.101537455"
Sep 29 10:38:13 crc kubenswrapper[4779]: I0929 10:38:13.208741 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:13 crc kubenswrapper[4779]: I0929 10:38:13.209357 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:13 crc kubenswrapper[4779]: I0929 10:38:13.862181 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:14 crc kubenswrapper[4779]: I0929 10:38:14.229014 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:14 crc kubenswrapper[4779]: I0929 10:38:14.282277 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6lvhv"]
Sep 29 10:38:14 crc kubenswrapper[4779]: I0929 10:38:14.715895 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401"
Sep 29 10:38:14 crc kubenswrapper[4779]: E0929 10:38:14.716585 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.192507 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6lvhv" podUID="170aabed-50a2-458e-9375-774a5be906bd" containerName="registry-server" containerID="cri-o://aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57" gracePeriod=2
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.663124 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.767739 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-utilities\") pod \"170aabed-50a2-458e-9375-774a5be906bd\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") "
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.768052 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nj65j\" (UniqueName: \"kubernetes.io/projected/170aabed-50a2-458e-9375-774a5be906bd-kube-api-access-nj65j\") pod \"170aabed-50a2-458e-9375-774a5be906bd\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") "
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.768088 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-catalog-content\") pod \"170aabed-50a2-458e-9375-774a5be906bd\" (UID: \"170aabed-50a2-458e-9375-774a5be906bd\") "
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.768898 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-utilities" (OuterVolumeSpecName: "utilities") pod "170aabed-50a2-458e-9375-774a5be906bd" (UID: "170aabed-50a2-458e-9375-774a5be906bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.774999 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/170aabed-50a2-458e-9375-774a5be906bd-kube-api-access-nj65j" (OuterVolumeSpecName: "kube-api-access-nj65j") pod "170aabed-50a2-458e-9375-774a5be906bd" (UID: "170aabed-50a2-458e-9375-774a5be906bd"). InnerVolumeSpecName "kube-api-access-nj65j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.813872 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "170aabed-50a2-458e-9375-774a5be906bd" (UID: "170aabed-50a2-458e-9375-774a5be906bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.871636 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.871690 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nj65j\" (UniqueName: \"kubernetes.io/projected/170aabed-50a2-458e-9375-774a5be906bd-kube-api-access-nj65j\") on node \"crc\" DevicePath \"\""
Sep 29 10:38:16 crc kubenswrapper[4779]: I0929 10:38:16.871703 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/170aabed-50a2-458e-9375-774a5be906bd-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.203357 4779 generic.go:334] "Generic (PLEG): container finished" podID="170aabed-50a2-458e-9375-774a5be906bd" containerID="aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57" exitCode=0
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.203405 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lvhv" event={"ID":"170aabed-50a2-458e-9375-774a5be906bd","Type":"ContainerDied","Data":"aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57"}
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.203441 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lvhv" event={"ID":"170aabed-50a2-458e-9375-774a5be906bd","Type":"ContainerDied","Data":"c27f425241a19aeb530bf27fba89bf852d7c618dd857d73363100f973745df10"}
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.203441 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6lvhv"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.203459 4779 scope.go:117] "RemoveContainer" containerID="aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.226717 4779 scope.go:117] "RemoveContainer" containerID="77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.248821 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6lvhv"]
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.258359 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6lvhv"]
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.270350 4779 scope.go:117] "RemoveContainer" containerID="6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.304064 4779 scope.go:117] "RemoveContainer" containerID="aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57"
Sep 29 10:38:17 crc kubenswrapper[4779]: E0929 10:38:17.304955 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57\": container with ID starting with aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57 not found: ID does not exist" containerID="aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.305003 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57"} err="failed to get container status \"aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57\": rpc error: code = NotFound desc = could not find container \"aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57\": container with ID starting with aa5c003bcf0e180069fd0298c737a1595fdad81fa199d5104514d7992abd4e57 not found: ID does not exist"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.305032 4779 scope.go:117] "RemoveContainer" containerID="77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024"
Sep 29 10:38:17 crc kubenswrapper[4779]: E0929 10:38:17.305534 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024\": container with ID starting with 77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024 not found: ID does not exist" containerID="77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.306012 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024"} err="failed to get container status \"77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024\": rpc error: code = NotFound desc = could not find container \"77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024\": container with ID starting with 77b378c5958098fd8a8c17ae51db0cc9f36312efdde6a1f40d9f21f67e5a7024 not found: ID does not exist"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.306289 4779 scope.go:117] "RemoveContainer" containerID="6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64"
Sep 29 10:38:17 crc kubenswrapper[4779]: E0929 10:38:17.306752 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64\": container with ID starting with 6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64 not found: ID does not exist" containerID="6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64"
Sep 29 10:38:17 crc kubenswrapper[4779]: I0929 10:38:17.306841 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64"} err="failed to get container status \"6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64\": rpc error: code = NotFound desc = could not find container \"6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64\": container with ID starting with 6efa66fd812fc773e988f493dd1a2b64bcb01665a2fcebfec77a11f5975b5d64 not found: ID does not exist"
Sep 29 10:38:18 crc kubenswrapper[4779]: I0929 10:38:18.729876 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="170aabed-50a2-458e-9375-774a5be906bd" path="/var/lib/kubelet/pods/170aabed-50a2-458e-9375-774a5be906bd/volumes"
Sep 29 10:38:27 crc kubenswrapper[4779]: I0929 10:38:27.714002 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401"
Sep 29 10:38:27 crc kubenswrapper[4779]: E0929 10:38:27.714886 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:38:42 crc kubenswrapper[4779]: I0929 10:38:42.715163 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401"
Sep 29 10:38:42 crc kubenswrapper[4779]: E0929 10:38:42.716294 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:38:56 crc kubenswrapper[4779]: I0929 10:38:56.716193 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401"
Sep 29 10:38:57 crc kubenswrapper[4779]: I0929 10:38:57.631763 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"86a2ac1679839d5c319699379380e594382e77f38d67d1764ae1bcceb4ae8cef"}
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.350696 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7h5kk"]
Sep 29 10:38:58 crc kubenswrapper[4779]: E0929 10:38:58.352225 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="170aabed-50a2-458e-9375-774a5be906bd" containerName="registry-server"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.352251 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="170aabed-50a2-458e-9375-774a5be906bd" containerName="registry-server"
Sep 29 10:38:58 crc kubenswrapper[4779]: E0929 10:38:58.352285 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="170aabed-50a2-458e-9375-774a5be906bd" containerName="extract-utilities"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.352298 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="170aabed-50a2-458e-9375-774a5be906bd" containerName="extract-utilities"
Sep 29 10:38:58 crc kubenswrapper[4779]: E0929 10:38:58.352331 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="170aabed-50a2-458e-9375-774a5be906bd" containerName="extract-content"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.352342 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="170aabed-50a2-458e-9375-774a5be906bd" containerName="extract-content"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.352670 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="170aabed-50a2-458e-9375-774a5be906bd" containerName="registry-server"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.355406 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.362853 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7h5kk"]
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.524685 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-catalog-content\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.524854 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnrmk\" (UniqueName: \"kubernetes.io/projected/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-kube-api-access-pnrmk\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.524888 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-utilities\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.626351 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-catalog-content\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.626530 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnrmk\" (UniqueName: \"kubernetes.io/projected/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-kube-api-access-pnrmk\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.626560 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-utilities\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.627053 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-catalog-content\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.627153 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-utilities\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.646357 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnrmk\" (UniqueName: \"kubernetes.io/projected/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-kube-api-access-pnrmk\") pod \"redhat-marketplace-7h5kk\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") " pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:58 crc kubenswrapper[4779]: I0929 10:38:58.699672 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:38:59 crc kubenswrapper[4779]: I0929 10:38:59.290870 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7h5kk"]
Sep 29 10:38:59 crc kubenswrapper[4779]: I0929 10:38:59.671861 4779 generic.go:334] "Generic (PLEG): container finished" podID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerID="c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239" exitCode=0
Sep 29 10:38:59 crc kubenswrapper[4779]: I0929 10:38:59.672093 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7h5kk" event={"ID":"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4","Type":"ContainerDied","Data":"c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239"}
Sep 29 10:38:59 crc kubenswrapper[4779]: I0929 10:38:59.672275 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7h5kk" event={"ID":"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4","Type":"ContainerStarted","Data":"888e7f57cfb08c19a9fb143db897f675e4330d5033237718e217dd387bdfdefe"}
Sep 29 10:39:01 crc kubenswrapper[4779]: I0929 10:39:01.696848 4779 generic.go:334] "Generic (PLEG): container finished" podID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerID="899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d" exitCode=0
Sep 29 10:39:01 crc kubenswrapper[4779]: I0929 10:39:01.697667 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7h5kk" event={"ID":"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4","Type":"ContainerDied","Data":"899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d"}
Sep 29 10:39:02 crc kubenswrapper[4779]: I0929 10:39:02.708880 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7h5kk" event={"ID":"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4","Type":"ContainerStarted","Data":"990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303"}
Sep 29 10:39:02 crc kubenswrapper[4779]: I0929 10:39:02.734044 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7h5kk" podStartSLOduration=2.244686954 podStartE2EDuration="4.734024759s" podCreationTimestamp="2025-09-29 10:38:58 +0000 UTC" firstStartedPulling="2025-09-29 10:38:59.674617853 +0000 UTC m=+4171.655941767" lastFinishedPulling="2025-09-29 10:39:02.163955668 +0000 UTC m=+4174.145279572" observedRunningTime="2025-09-29 10:39:02.729024254 +0000 UTC m=+4174.710348168" watchObservedRunningTime="2025-09-29 10:39:02.734024759 +0000 UTC m=+4174.715348663"
Sep 29 10:39:08 crc kubenswrapper[4779]: I0929 10:39:08.700410 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:39:08 crc kubenswrapper[4779]: I0929 10:39:08.701692 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:39:08 crc kubenswrapper[4779]: I0929 10:39:08.757949 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:39:08 crc kubenswrapper[4779]: I0929 10:39:08.834777 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:39:09 crc kubenswrapper[4779]: I0929 10:39:09.003713 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7h5kk"]
Sep 29 10:39:10 crc kubenswrapper[4779]: I0929 10:39:10.784848 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7h5kk" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerName="registry-server" containerID="cri-o://990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303" gracePeriod=2
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.256381 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.331515 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-catalog-content\") pod \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") "
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.331732 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-utilities\") pod \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") "
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.331778 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnrmk\" (UniqueName: \"kubernetes.io/projected/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-kube-api-access-pnrmk\") pod \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\" (UID: \"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4\") "
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.333001 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-utilities" (OuterVolumeSpecName: "utilities") pod "cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" (UID: "cf4292a5-35e8-47e8-bdc1-6e7af0784ec4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.337977 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-kube-api-access-pnrmk" (OuterVolumeSpecName: "kube-api-access-pnrmk") pod "cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" (UID: "cf4292a5-35e8-47e8-bdc1-6e7af0784ec4"). InnerVolumeSpecName "kube-api-access-pnrmk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.349063 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" (UID: "cf4292a5-35e8-47e8-bdc1-6e7af0784ec4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.434671 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.434708 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.434718 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnrmk\" (UniqueName: \"kubernetes.io/projected/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4-kube-api-access-pnrmk\") on node \"crc\" DevicePath \"\""
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.801333 4779 generic.go:334] "Generic (PLEG): container finished" podID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerID="990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303" exitCode=0
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.801442 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7h5kk"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.801450 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7h5kk" event={"ID":"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4","Type":"ContainerDied","Data":"990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303"}
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.803106 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7h5kk" event={"ID":"cf4292a5-35e8-47e8-bdc1-6e7af0784ec4","Type":"ContainerDied","Data":"888e7f57cfb08c19a9fb143db897f675e4330d5033237718e217dd387bdfdefe"}
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.803210 4779 scope.go:117] "RemoveContainer" containerID="990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.830342 4779 scope.go:117] "RemoveContainer" containerID="899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.851477 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7h5kk"]
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.864143 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7h5kk"]
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.872105 4779 scope.go:117] "RemoveContainer" containerID="c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.930623 4779 scope.go:117] "RemoveContainer" containerID="990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303"
Sep 29 10:39:11 crc kubenswrapper[4779]: E0929 10:39:11.931286 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303\": container with ID starting with 990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303 not found: ID does not exist" containerID="990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.931347 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303"} err="failed to get container status \"990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303\": rpc error: code = NotFound desc = could not find container \"990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303\": container with ID starting with 990b247db93c3a91df9911ead4fb763b79ed05e4cc1b4ce72fb96bdee5ac4303 not found: ID does not exist"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.931393 4779 scope.go:117] "RemoveContainer" containerID="899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d"
Sep 29 10:39:11 crc kubenswrapper[4779]: E0929 10:39:11.931868 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d\": container with ID starting with 899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d not found: ID does not exist" containerID="899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.931894 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d"} err="failed to get container status \"899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d\": rpc error: code = NotFound desc = could not find container \"899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d\": container with ID starting with 899c7b7fcadbfbd4920e863b55dbcc421ced691465971592300cf52b2fce603d not found: ID does not exist"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.931930 4779 scope.go:117] "RemoveContainer" containerID="c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239"
Sep 29 10:39:11 crc kubenswrapper[4779]: E0929 10:39:11.932555 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239\": container with ID starting with c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239 not found: ID does not exist" containerID="c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239"
Sep 29 10:39:11 crc kubenswrapper[4779]: I0929 10:39:11.932602 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239"} err="failed to get container status \"c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239\": rpc error: code = NotFound desc = could not find container \"c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239\": container with ID starting with c2980b7fd1c8b6703b4463b5aec5d245cb1de18dd75a820c25e9478505205239 not found: ID does not exist"
Sep 29 10:39:12 crc kubenswrapper[4779]: I0929 10:39:12.740625 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" path="/var/lib/kubelet/pods/cf4292a5-35e8-47e8-bdc1-6e7af0784ec4/volumes"
Sep 29 10:41:16 crc kubenswrapper[4779]: I0929 10:41:16.966748 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 10:41:16 crc kubenswrapper[4779]: I0929 10:41:16.967795 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 10:41:46 crc kubenswrapper[4779]: I0929 10:41:46.966827 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 10:41:46 crc kubenswrapper[4779]: I0929 10:41:46.967399 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 10:42:16 crc kubenswrapper[4779]: I0929 10:42:16.967161 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 10:42:16 crc kubenswrapper[4779]: I0929 10:42:16.968268 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 10:42:16 crc kubenswrapper[4779]: I0929 10:42:16.968358 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv"
Sep 29 10:42:16 crc kubenswrapper[4779]: I0929 10:42:16.970060 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"86a2ac1679839d5c319699379380e594382e77f38d67d1764ae1bcceb4ae8cef"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 10:42:16 crc kubenswrapper[4779]: I0929 10:42:16.970145 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://86a2ac1679839d5c319699379380e594382e77f38d67d1764ae1bcceb4ae8cef" gracePeriod=600
Sep 29 10:42:17 crc kubenswrapper[4779]: I0929 10:42:17.741871 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="86a2ac1679839d5c319699379380e594382e77f38d67d1764ae1bcceb4ae8cef" exitCode=0
Sep 29 10:42:17 crc kubenswrapper[4779]: I0929 10:42:17.741943 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"86a2ac1679839d5c319699379380e594382e77f38d67d1764ae1bcceb4ae8cef"}
Sep 29 10:42:17 crc kubenswrapper[4779]: I0929 10:42:17.742556 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"}
Sep 29 10:42:17 crc kubenswrapper[4779]: I0929 10:42:17.742589 4779 scope.go:117] "RemoveContainer" containerID="8eba7758df3801d50f0b472b05fc6e75b74a3d2186b90b898f737c3e8f2b5401"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.420290 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fj2z6"]
Sep 29 10:42:21 crc kubenswrapper[4779]: E0929 10:42:21.422558 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerName="extract-utilities"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.422576 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerName="extract-utilities"
Sep 29 10:42:21 crc kubenswrapper[4779]: E0929 10:42:21.422630 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerName="extract-content"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.422640 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerName="extract-content"
Sep 29 10:42:21 crc kubenswrapper[4779]: E0929 10:42:21.422655 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerName="registry-server"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.422664 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerName="registry-server"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.422954 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4292a5-35e8-47e8-bdc1-6e7af0784ec4" containerName="registry-server"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.425249 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.433475 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fj2z6"]
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.490370 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-catalog-content\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.490618 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-utilities\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.490730 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg8g8\" (UniqueName: \"kubernetes.io/projected/6ed86c9e-1efd-477d-9187-26b91bd90e93-kube-api-access-vg8g8\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.593171 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-utilities\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.593257 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg8g8\" (UniqueName: \"kubernetes.io/projected/6ed86c9e-1efd-477d-9187-26b91bd90e93-kube-api-access-vg8g8\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.593405 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-catalog-content\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.593767 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-utilities\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.593792 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-catalog-content\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.622135 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg8g8\" (UniqueName: \"kubernetes.io/projected/6ed86c9e-1efd-477d-9187-26b91bd90e93-kube-api-access-vg8g8\") pod \"redhat-operators-fj2z6\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:21 crc kubenswrapper[4779]: I0929 10:42:21.759354 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:22 crc kubenswrapper[4779]: I0929 10:42:22.256063 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fj2z6"]
Sep 29 10:42:22 crc kubenswrapper[4779]: I0929 10:42:22.804300 4779 generic.go:334] "Generic (PLEG): container finished" podID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerID="58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7" exitCode=0
Sep 29 10:42:22 crc kubenswrapper[4779]: I0929 10:42:22.804411 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj2z6" event={"ID":"6ed86c9e-1efd-477d-9187-26b91bd90e93","Type":"ContainerDied","Data":"58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7"}
Sep 29 10:42:22 crc kubenswrapper[4779]: I0929 10:42:22.804646 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj2z6" event={"ID":"6ed86c9e-1efd-477d-9187-26b91bd90e93","Type":"ContainerStarted","Data":"71e390fa2177a8dfc55bcbdca5d3998743d65681313112837a5712c2a6571fd1"}
Sep 29 10:42:23 crc kubenswrapper[4779]: I0929 10:42:23.822149 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj2z6" event={"ID":"6ed86c9e-1efd-477d-9187-26b91bd90e93","Type":"ContainerStarted","Data":"d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe"}
Sep 29 10:42:25 crc kubenswrapper[4779]: I0929 10:42:25.841426 4779 generic.go:334] "Generic (PLEG): container finished" podID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerID="d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe" exitCode=0
Sep 29 10:42:25 crc kubenswrapper[4779]: I0929 10:42:25.841515 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj2z6" event={"ID":"6ed86c9e-1efd-477d-9187-26b91bd90e93","Type":"ContainerDied","Data":"d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe"}
Sep 29 10:42:26 crc kubenswrapper[4779]: I0929 10:42:26.855017 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj2z6" event={"ID":"6ed86c9e-1efd-477d-9187-26b91bd90e93","Type":"ContainerStarted","Data":"a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331"}
Sep 29 10:42:26 crc kubenswrapper[4779]: I0929 10:42:26.882082 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fj2z6" podStartSLOduration=2.4256607150000002 podStartE2EDuration="5.882059118s" podCreationTimestamp="2025-09-29 10:42:21 +0000 UTC" firstStartedPulling="2025-09-29 10:42:22.806431604 +0000 UTC m=+4374.787755508" lastFinishedPulling="2025-09-29 10:42:26.262829997 +0000 UTC m=+4378.244153911" observedRunningTime="2025-09-29 10:42:26.876693472 +0000 UTC m=+4378.858017376" watchObservedRunningTime="2025-09-29 10:42:26.882059118 +0000 UTC m=+4378.863383032"
Sep 29 10:42:31 crc kubenswrapper[4779]: I0929 10:42:31.760442 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fj2z6"
Sep 29 10:42:31 crc kubenswrapper[4779]: I0929 10:42:31.761116 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fj2z6" Sep 29 10:42:31 crc kubenswrapper[4779]: I0929 10:42:31.810683 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fj2z6" Sep 29 10:42:31 crc kubenswrapper[4779]: I0929 10:42:31.950971 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fj2z6" Sep 29 10:42:32 crc kubenswrapper[4779]: I0929 10:42:32.060059 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fj2z6"] Sep 29 10:42:33 crc kubenswrapper[4779]: I0929 10:42:33.919414 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fj2z6" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerName="registry-server" containerID="cri-o://a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331" gracePeriod=2 Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.464209 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fj2z6" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.486781 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-catalog-content\") pod \"6ed86c9e-1efd-477d-9187-26b91bd90e93\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.487209 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg8g8\" (UniqueName: \"kubernetes.io/projected/6ed86c9e-1efd-477d-9187-26b91bd90e93-kube-api-access-vg8g8\") pod \"6ed86c9e-1efd-477d-9187-26b91bd90e93\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.487441 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-utilities\") pod \"6ed86c9e-1efd-477d-9187-26b91bd90e93\" (UID: \"6ed86c9e-1efd-477d-9187-26b91bd90e93\") " Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.489177 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-utilities" (OuterVolumeSpecName: "utilities") pod "6ed86c9e-1efd-477d-9187-26b91bd90e93" (UID: "6ed86c9e-1efd-477d-9187-26b91bd90e93"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.497317 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ed86c9e-1efd-477d-9187-26b91bd90e93-kube-api-access-vg8g8" (OuterVolumeSpecName: "kube-api-access-vg8g8") pod "6ed86c9e-1efd-477d-9187-26b91bd90e93" (UID: "6ed86c9e-1efd-477d-9187-26b91bd90e93"). InnerVolumeSpecName "kube-api-access-vg8g8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.589463 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.589494 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg8g8\" (UniqueName: \"kubernetes.io/projected/6ed86c9e-1efd-477d-9187-26b91bd90e93-kube-api-access-vg8g8\") on node \"crc\" DevicePath \"\"" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.603699 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ed86c9e-1efd-477d-9187-26b91bd90e93" (UID: "6ed86c9e-1efd-477d-9187-26b91bd90e93"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.691432 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed86c9e-1efd-477d-9187-26b91bd90e93-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.931250 4779 generic.go:334] "Generic (PLEG): container finished" podID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerID="a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331" exitCode=0 Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.931295 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj2z6" event={"ID":"6ed86c9e-1efd-477d-9187-26b91bd90e93","Type":"ContainerDied","Data":"a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331"} Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.931335 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj2z6" event={"ID":"6ed86c9e-1efd-477d-9187-26b91bd90e93","Type":"ContainerDied","Data":"71e390fa2177a8dfc55bcbdca5d3998743d65681313112837a5712c2a6571fd1"} Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.931352 4779 scope.go:117] "RemoveContainer" containerID="a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.931371 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fj2z6" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.958837 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fj2z6"] Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.964975 4779 scope.go:117] "RemoveContainer" containerID="d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe" Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.980960 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fj2z6"] Sep 29 10:42:34 crc kubenswrapper[4779]: I0929 10:42:34.997916 4779 scope.go:117] "RemoveContainer" containerID="58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7" Sep 29 10:42:35 crc kubenswrapper[4779]: I0929 10:42:35.055982 4779 scope.go:117] "RemoveContainer" containerID="a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331" Sep 29 10:42:35 crc kubenswrapper[4779]: E0929 10:42:35.056949 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331\": container with ID starting with a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331 not found: ID does not exist" containerID="a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331" Sep 29 10:42:35 crc kubenswrapper[4779]: I0929 10:42:35.056987 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331"} err="failed to get container status \"a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331\": rpc error: code = NotFound desc = could not find container \"a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331\": container with ID starting with a03089fa7566626e0265ced37338052b09b5290442e2503391e084d3437f5331 not found: ID does not exist" Sep 29 10:42:35 crc kubenswrapper[4779]: I0929 10:42:35.057012 4779 scope.go:117] "RemoveContainer" containerID="d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe" Sep 29 10:42:35 crc kubenswrapper[4779]: E0929 10:42:35.058970 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe\": container with ID starting with d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe not found: ID does not exist" containerID="d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe" Sep 29 10:42:35 crc kubenswrapper[4779]: I0929 10:42:35.059015 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe"} err="failed to get container status \"d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe\": rpc error: code = NotFound desc = could not find container \"d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe\": container with ID starting with d8e2dc041778f114a7897695da9c8b2f73a2d93db2d9949aed70513d5098edbe not found: ID does not exist" Sep 29 10:42:35 crc kubenswrapper[4779]: I0929 10:42:35.059045 4779 scope.go:117] "RemoveContainer" containerID="58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7" Sep 29 10:42:35 crc kubenswrapper[4779]: E0929 10:42:35.060308 4779 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7\": container with ID starting with 58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7 not found: ID does not exist" containerID="58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7" Sep 29 10:42:35 crc kubenswrapper[4779]: I0929 10:42:35.060363 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7"} err="failed to get container status \"58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7\": rpc error: code = NotFound desc = could not find container \"58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7\": container with ID starting with 58cbf7a2abdc2d24174a164c56aee4c374aaa9d537140748f102e36121a8e0d7 not found: ID does not exist" Sep 29 10:42:36 crc kubenswrapper[4779]: I0929 10:42:36.728269 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" path="/var/lib/kubelet/pods/6ed86c9e-1efd-477d-9187-26b91bd90e93/volumes" Sep 29 10:44:46 crc kubenswrapper[4779]: I0929 10:44:46.966811 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:44:46 crc kubenswrapper[4779]: I0929 10:44:46.967386 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.163648 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"] Sep 29 10:45:00 crc kubenswrapper[4779]: E0929 10:45:00.164762 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerName="extract-utilities" Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.164779 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerName="extract-utilities" Sep 29 10:45:00 crc kubenswrapper[4779]: E0929 10:45:00.164810 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerName="registry-server" Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.164816 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerName="registry-server" Sep 29 10:45:00 crc kubenswrapper[4779]: E0929 10:45:00.164841 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerName="extract-content" Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.164847 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerName="extract-content" Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.165092 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ed86c9e-1efd-477d-9187-26b91bd90e93" containerName="registry-server" Sep 29 10:45:00 crc 
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.165913 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.171317 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.173109 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.180452 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"]
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.269374 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/76a366f3-0959-48da-9e6c-33c4723e8cf6-config-volume\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.269704 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbxvh\" (UniqueName: \"kubernetes.io/projected/76a366f3-0959-48da-9e6c-33c4723e8cf6-kube-api-access-qbxvh\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.269876 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/76a366f3-0959-48da-9e6c-33c4723e8cf6-secret-volume\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.372558 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/76a366f3-0959-48da-9e6c-33c4723e8cf6-secret-volume\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.372719 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/76a366f3-0959-48da-9e6c-33c4723e8cf6-config-volume\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.372784 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbxvh\" (UniqueName: \"kubernetes.io/projected/76a366f3-0959-48da-9e6c-33c4723e8cf6-kube-api-access-qbxvh\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.373711 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/76a366f3-0959-48da-9e6c-33c4723e8cf6-config-volume\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.379523 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/76a366f3-0959-48da-9e6c-33c4723e8cf6-secret-volume\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.391421 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbxvh\" (UniqueName: \"kubernetes.io/projected/76a366f3-0959-48da-9e6c-33c4723e8cf6-kube-api-access-qbxvh\") pod \"collect-profiles-29319045-fkh9p\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.497375 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:00 crc kubenswrapper[4779]: I0929 10:45:00.962309 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"]
Sep 29 10:45:01 crc kubenswrapper[4779]: I0929 10:45:01.405877 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p" event={"ID":"76a366f3-0959-48da-9e6c-33c4723e8cf6","Type":"ContainerStarted","Data":"470d28fbf2dfd71e3fde17b71cd382612017f40e5528224db4fe1fa53d1800b1"}
Sep 29 10:45:01 crc kubenswrapper[4779]: I0929 10:45:01.405949 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p" event={"ID":"76a366f3-0959-48da-9e6c-33c4723e8cf6","Type":"ContainerStarted","Data":"5258590bc3a8efa9b4ce55691783c905968cb5c8b9838c76e6a5740f3cfe1e39"}
Sep 29 10:45:01 crc kubenswrapper[4779]: I0929 10:45:01.442659 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p" podStartSLOduration=1.442638042 podStartE2EDuration="1.442638042s" podCreationTimestamp="2025-09-29 10:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 10:45:01.434405763 +0000 UTC m=+4533.415729667" watchObservedRunningTime="2025-09-29 10:45:01.442638042 +0000 UTC m=+4533.423961946"
Sep 29 10:45:02 crc kubenswrapper[4779]: I0929 10:45:02.419445 4779 generic.go:334] "Generic (PLEG): container finished" podID="76a366f3-0959-48da-9e6c-33c4723e8cf6" containerID="470d28fbf2dfd71e3fde17b71cd382612017f40e5528224db4fe1fa53d1800b1" exitCode=0
Sep 29 10:45:02 crc kubenswrapper[4779]: I0929 10:45:02.419490 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p" event={"ID":"76a366f3-0959-48da-9e6c-33c4723e8cf6","Type":"ContainerDied","Data":"470d28fbf2dfd71e3fde17b71cd382612017f40e5528224db4fe1fa53d1800b1"}
Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.821583 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p"
Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.857995 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbxvh\" (UniqueName: \"kubernetes.io/projected/76a366f3-0959-48da-9e6c-33c4723e8cf6-kube-api-access-qbxvh\") pod \"76a366f3-0959-48da-9e6c-33c4723e8cf6\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") "
Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.858050 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/76a366f3-0959-48da-9e6c-33c4723e8cf6-config-volume\") pod \"76a366f3-0959-48da-9e6c-33c4723e8cf6\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") "
Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.858089 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/76a366f3-0959-48da-9e6c-33c4723e8cf6-secret-volume\") pod \"76a366f3-0959-48da-9e6c-33c4723e8cf6\" (UID: \"76a366f3-0959-48da-9e6c-33c4723e8cf6\") "
Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.858885 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76a366f3-0959-48da-9e6c-33c4723e8cf6-config-volume" (OuterVolumeSpecName: "config-volume") pod "76a366f3-0959-48da-9e6c-33c4723e8cf6" (UID: "76a366f3-0959-48da-9e6c-33c4723e8cf6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.865244 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76a366f3-0959-48da-9e6c-33c4723e8cf6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "76a366f3-0959-48da-9e6c-33c4723e8cf6" (UID: "76a366f3-0959-48da-9e6c-33c4723e8cf6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.865627 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76a366f3-0959-48da-9e6c-33c4723e8cf6-kube-api-access-qbxvh" (OuterVolumeSpecName: "kube-api-access-qbxvh") pod "76a366f3-0959-48da-9e6c-33c4723e8cf6" (UID: "76a366f3-0959-48da-9e6c-33c4723e8cf6"). InnerVolumeSpecName "kube-api-access-qbxvh". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.960755 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbxvh\" (UniqueName: \"kubernetes.io/projected/76a366f3-0959-48da-9e6c-33c4723e8cf6-kube-api-access-qbxvh\") on node \"crc\" DevicePath \"\"" Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.960793 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/76a366f3-0959-48da-9e6c-33c4723e8cf6-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 10:45:03 crc kubenswrapper[4779]: I0929 10:45:03.960802 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/76a366f3-0959-48da-9e6c-33c4723e8cf6-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 10:45:04 crc kubenswrapper[4779]: I0929 10:45:04.443060 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p" event={"ID":"76a366f3-0959-48da-9e6c-33c4723e8cf6","Type":"ContainerDied","Data":"5258590bc3a8efa9b4ce55691783c905968cb5c8b9838c76e6a5740f3cfe1e39"} Sep 29 10:45:04 crc kubenswrapper[4779]: I0929 10:45:04.443384 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5258590bc3a8efa9b4ce55691783c905968cb5c8b9838c76e6a5740f3cfe1e39" Sep 29 10:45:04 crc kubenswrapper[4779]: I0929 10:45:04.443174 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319045-fkh9p" Sep 29 10:45:04 crc kubenswrapper[4779]: I0929 10:45:04.516346 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm"] Sep 29 10:45:04 crc kubenswrapper[4779]: I0929 10:45:04.523935 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319000-q2qmm"] Sep 29 10:45:04 crc kubenswrapper[4779]: I0929 10:45:04.726345 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35" path="/var/lib/kubelet/pods/7ff3152a-f2c7-4dd6-aa23-f45ffb5f8a35/volumes" Sep 29 10:45:16 crc kubenswrapper[4779]: I0929 10:45:16.967190 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:45:16 crc kubenswrapper[4779]: I0929 10:45:16.967886 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:45:46 crc kubenswrapper[4779]: I0929 10:45:46.966404 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:45:46 crc kubenswrapper[4779]: I0929 10:45:46.967312 4779 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:45:46 crc kubenswrapper[4779]: I0929 10:45:46.967384 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 10:45:46 crc kubenswrapper[4779]: I0929 10:45:46.968295 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 10:45:46 crc kubenswrapper[4779]: I0929 10:45:46.968349 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" gracePeriod=600 Sep 29 10:45:47 crc kubenswrapper[4779]: E0929 10:45:47.108758 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:45:47 crc kubenswrapper[4779]: I0929 10:45:47.928373 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" exitCode=0 Sep 29 10:45:47 crc kubenswrapper[4779]: I0929 10:45:47.928556 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"} Sep 29 10:45:47 crc kubenswrapper[4779]: I0929 10:45:47.928766 4779 scope.go:117] "RemoveContainer" containerID="86a2ac1679839d5c319699379380e594382e77f38d67d1764ae1bcceb4ae8cef" Sep 29 10:45:47 crc kubenswrapper[4779]: I0929 10:45:47.929644 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:45:47 crc kubenswrapper[4779]: E0929 10:45:47.929983 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:45:50 crc kubenswrapper[4779]: I0929 10:45:50.822002 4779 scope.go:117] "RemoveContainer" containerID="e69ecf402440ab6cd373a336f634be27c89048d268149f96ecc0daddce5dcc86" Sep 29 10:45:58 crc kubenswrapper[4779]: I0929 10:45:58.728261 4779 scope.go:117] "RemoveContainer" 
containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:45:58 crc kubenswrapper[4779]: E0929 10:45:58.729572 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:46:09 crc kubenswrapper[4779]: I0929 10:46:09.116798 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="bb5f88d2-6663-4ed6-a7a7-93ee500c9edf" containerName="galera" probeResult="failure" output="command timed out" Sep 29 10:46:09 crc kubenswrapper[4779]: I0929 10:46:09.120041 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-galera-0" podUID="bb5f88d2-6663-4ed6-a7a7-93ee500c9edf" containerName="galera" probeResult="failure" output="command timed out" Sep 29 10:46:11 crc kubenswrapper[4779]: I0929 10:46:11.715188 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:46:11 crc kubenswrapper[4779]: E0929 10:46:11.716080 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:46:23 crc kubenswrapper[4779]: I0929 10:46:23.714358 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:46:23 crc kubenswrapper[4779]: E0929 10:46:23.715599 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:46:36 crc kubenswrapper[4779]: I0929 10:46:36.715090 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:46:36 crc kubenswrapper[4779]: E0929 10:46:36.716344 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:46:47 crc kubenswrapper[4779]: I0929 10:46:47.714389 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:46:47 crc kubenswrapper[4779]: E0929 10:46:47.715672 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:47:02 crc kubenswrapper[4779]: I0929 10:47:02.715209 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:47:02 crc kubenswrapper[4779]: E0929 10:47:02.716948 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:47:14 crc kubenswrapper[4779]: I0929 10:47:14.714810 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:47:14 crc kubenswrapper[4779]: E0929 10:47:14.715926 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:47:25 crc kubenswrapper[4779]: I0929 10:47:25.715103 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:47:25 crc kubenswrapper[4779]: E0929 10:47:25.716525 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:47:37 crc kubenswrapper[4779]: I0929 10:47:37.714793 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:47:37 crc kubenswrapper[4779]: E0929 10:47:37.715966 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:47:50 crc kubenswrapper[4779]: I0929 10:47:50.714939 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:47:50 crc kubenswrapper[4779]: E0929 10:47:50.715985 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:48:03 crc kubenswrapper[4779]: I0929 10:48:03.714999 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:48:03 crc kubenswrapper[4779]: E0929 10:48:03.716091 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:48:14 crc kubenswrapper[4779]: I0929 10:48:14.715292 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:48:14 crc kubenswrapper[4779]: E0929 10:48:14.716405 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:48:18 crc kubenswrapper[4779]: I0929 10:48:18.941394 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7rcq7"] Sep 29 10:48:18 crc kubenswrapper[4779]: E0929 10:48:18.942766 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76a366f3-0959-48da-9e6c-33c4723e8cf6" containerName="collect-profiles" Sep 29 10:48:18 crc kubenswrapper[4779]: I0929 10:48:18.942786 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="76a366f3-0959-48da-9e6c-33c4723e8cf6" containerName="collect-profiles" Sep 29 10:48:18 crc kubenswrapper[4779]: I0929 10:48:18.943056 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="76a366f3-0959-48da-9e6c-33c4723e8cf6" containerName="collect-profiles" Sep 29 10:48:18 crc kubenswrapper[4779]: I0929 10:48:18.945345 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:18 crc kubenswrapper[4779]: I0929 10:48:18.974016 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7rcq7"] Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.062428 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-catalog-content\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.062521 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlnkx\" (UniqueName: \"kubernetes.io/projected/4cc01b93-0735-4f89-8d78-a06458df6ee3-kube-api-access-rlnkx\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.062631 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-utilities\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.164791 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-catalog-content\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.164879 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlnkx\" (UniqueName: \"kubernetes.io/projected/4cc01b93-0735-4f89-8d78-a06458df6ee3-kube-api-access-rlnkx\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.165009 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-utilities\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.165633 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-utilities\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.165727 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-catalog-content\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.189779 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rlnkx\" (UniqueName: \"kubernetes.io/projected/4cc01b93-0735-4f89-8d78-a06458df6ee3-kube-api-access-rlnkx\") pod \"community-operators-7rcq7\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.273647 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:19 crc kubenswrapper[4779]: I0929 10:48:19.964479 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7rcq7"] Sep 29 10:48:20 crc kubenswrapper[4779]: I0929 10:48:20.568050 4779 generic.go:334] "Generic (PLEG): container finished" podID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerID="44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad" exitCode=0 Sep 29 10:48:20 crc kubenswrapper[4779]: I0929 10:48:20.568124 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rcq7" event={"ID":"4cc01b93-0735-4f89-8d78-a06458df6ee3","Type":"ContainerDied","Data":"44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad"} Sep 29 10:48:20 crc kubenswrapper[4779]: I0929 10:48:20.568556 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rcq7" event={"ID":"4cc01b93-0735-4f89-8d78-a06458df6ee3","Type":"ContainerStarted","Data":"03f463400d9f45e18ae67edbb81e6537c1c6bfa3cbb7fa4e367a274e921a25ee"} Sep 29 10:48:20 crc kubenswrapper[4779]: I0929 10:48:20.571451 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 10:48:22 crc kubenswrapper[4779]: I0929 10:48:22.592377 4779 generic.go:334] "Generic (PLEG): container finished" podID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerID="5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54" exitCode=0 Sep 29 10:48:22 crc kubenswrapper[4779]: I0929 10:48:22.592505 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rcq7" event={"ID":"4cc01b93-0735-4f89-8d78-a06458df6ee3","Type":"ContainerDied","Data":"5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54"} Sep 29 10:48:23 crc kubenswrapper[4779]: I0929 10:48:23.607044 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rcq7" event={"ID":"4cc01b93-0735-4f89-8d78-a06458df6ee3","Type":"ContainerStarted","Data":"4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f"} Sep 29 10:48:23 crc kubenswrapper[4779]: I0929 10:48:23.630962 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7rcq7" podStartSLOduration=3.046244205 podStartE2EDuration="5.630939481s" podCreationTimestamp="2025-09-29 10:48:18 +0000 UTC" firstStartedPulling="2025-09-29 10:48:20.571185821 +0000 UTC m=+4732.552509725" lastFinishedPulling="2025-09-29 10:48:23.155881097 +0000 UTC m=+4735.137205001" observedRunningTime="2025-09-29 10:48:23.628964133 +0000 UTC m=+4735.610288047" watchObservedRunningTime="2025-09-29 10:48:23.630939481 +0000 UTC m=+4735.612263385" Sep 29 10:48:27 crc kubenswrapper[4779]: I0929 10:48:27.714951 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:48:27 crc kubenswrapper[4779]: E0929 10:48:27.716188 4779 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:48:29 crc kubenswrapper[4779]: I0929 10:48:29.273836 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:29 crc kubenswrapper[4779]: I0929 10:48:29.274344 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:29 crc kubenswrapper[4779]: I0929 10:48:29.758284 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:29 crc kubenswrapper[4779]: I0929 10:48:29.822345 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:30 crc kubenswrapper[4779]: I0929 10:48:30.001291 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7rcq7"] Sep 29 10:48:31 crc kubenswrapper[4779]: I0929 10:48:31.699378 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7rcq7" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerName="registry-server" containerID="cri-o://4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f" gracePeriod=2 Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.533037 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.631857 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-utilities\") pod \"4cc01b93-0735-4f89-8d78-a06458df6ee3\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.632002 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlnkx\" (UniqueName: \"kubernetes.io/projected/4cc01b93-0735-4f89-8d78-a06458df6ee3-kube-api-access-rlnkx\") pod \"4cc01b93-0735-4f89-8d78-a06458df6ee3\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.632224 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-catalog-content\") pod \"4cc01b93-0735-4f89-8d78-a06458df6ee3\" (UID: \"4cc01b93-0735-4f89-8d78-a06458df6ee3\") " Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.634036 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-utilities" (OuterVolumeSpecName: "utilities") pod "4cc01b93-0735-4f89-8d78-a06458df6ee3" (UID: "4cc01b93-0735-4f89-8d78-a06458df6ee3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.640202 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cc01b93-0735-4f89-8d78-a06458df6ee3-kube-api-access-rlnkx" (OuterVolumeSpecName: "kube-api-access-rlnkx") pod "4cc01b93-0735-4f89-8d78-a06458df6ee3" (UID: "4cc01b93-0735-4f89-8d78-a06458df6ee3"). InnerVolumeSpecName "kube-api-access-rlnkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.690615 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4cc01b93-0735-4f89-8d78-a06458df6ee3" (UID: "4cc01b93-0735-4f89-8d78-a06458df6ee3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.713147 4779 generic.go:334] "Generic (PLEG): container finished" podID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerID="4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f" exitCode=0 Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.713300 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7rcq7" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.726587 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rcq7" event={"ID":"4cc01b93-0735-4f89-8d78-a06458df6ee3","Type":"ContainerDied","Data":"4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f"} Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.726884 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7rcq7" event={"ID":"4cc01b93-0735-4f89-8d78-a06458df6ee3","Type":"ContainerDied","Data":"03f463400d9f45e18ae67edbb81e6537c1c6bfa3cbb7fa4e367a274e921a25ee"} Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.726992 4779 scope.go:117] "RemoveContainer" containerID="4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.735032 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.735086 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc01b93-0735-4f89-8d78-a06458df6ee3-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.735106 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlnkx\" (UniqueName: \"kubernetes.io/projected/4cc01b93-0735-4f89-8d78-a06458df6ee3-kube-api-access-rlnkx\") on node \"crc\" DevicePath \"\"" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.755821 4779 scope.go:117] "RemoveContainer" containerID="5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.777670 4779 scope.go:117] "RemoveContainer" containerID="44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.780606 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-7rcq7"] Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.790224 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7rcq7"] Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.838161 4779 scope.go:117] "RemoveContainer" containerID="4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f" Sep 29 10:48:32 crc kubenswrapper[4779]: E0929 10:48:32.838579 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f\": container with ID starting with 4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f not found: ID does not exist" containerID="4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.838626 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f"} err="failed to get container status \"4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f\": rpc error: code = NotFound desc = could not find container \"4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f\": container with ID starting with 4393df2151b3e2c30fea787ebee5035f518d40a20210ecefc2094342fa3d956f not found: ID does not exist" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.838653 4779 scope.go:117] "RemoveContainer" containerID="5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54" Sep 29 10:48:32 crc kubenswrapper[4779]: E0929 10:48:32.839197 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54\": container with ID starting with 5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54 not found: ID does not exist" containerID="5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.839242 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54"} err="failed to get container status \"5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54\": rpc error: code = NotFound desc = could not find container \"5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54\": container with ID starting with 5c17bab6a486b9b189339696c3b135401172ff343e1f5da283bf9e76239d0e54 not found: ID does not exist" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.839267 4779 scope.go:117] "RemoveContainer" containerID="44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad" Sep 29 10:48:32 crc kubenswrapper[4779]: E0929 10:48:32.839561 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad\": container with ID starting with 44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad not found: ID does not exist" containerID="44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad" Sep 29 10:48:32 crc kubenswrapper[4779]: I0929 10:48:32.839585 4779 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad"} err="failed to get container status \"44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad\": rpc error: code = NotFound desc = could not find container \"44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad\": container with ID starting with 44078fd7074f147fc8fd879cf580300327fd260e935352c35735a44526a942ad not found: ID does not exist" Sep 29 10:48:34 crc kubenswrapper[4779]: I0929 10:48:34.726783 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" path="/var/lib/kubelet/pods/4cc01b93-0735-4f89-8d78-a06458df6ee3/volumes" Sep 29 10:48:38 crc kubenswrapper[4779]: I0929 10:48:38.736983 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:48:38 crc kubenswrapper[4779]: E0929 10:48:38.740112 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:48:44 crc kubenswrapper[4779]: I0929 10:48:44.975865 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hpkrr"] Sep 29 10:48:44 crc kubenswrapper[4779]: E0929 10:48:44.977245 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerName="extract-content" Sep 29 10:48:44 crc kubenswrapper[4779]: I0929 10:48:44.977265 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerName="extract-content" Sep 29 10:48:44 crc kubenswrapper[4779]: E0929 10:48:44.977286 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerName="registry-server" Sep 29 10:48:44 crc kubenswrapper[4779]: I0929 10:48:44.977293 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerName="registry-server" Sep 29 10:48:44 crc kubenswrapper[4779]: E0929 10:48:44.977346 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerName="extract-utilities" Sep 29 10:48:44 crc kubenswrapper[4779]: I0929 10:48:44.977357 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerName="extract-utilities" Sep 29 10:48:44 crc kubenswrapper[4779]: I0929 10:48:44.977555 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cc01b93-0735-4f89-8d78-a06458df6ee3" containerName="registry-server" Sep 29 10:48:44 crc kubenswrapper[4779]: I0929 10:48:44.979779 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.002024 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hpkrr"] Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.044727 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-catalog-content\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.045315 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9jqp\" (UniqueName: \"kubernetes.io/projected/dfb56362-7962-4e92-a7e3-b0a80648c47b-kube-api-access-g9jqp\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.045436 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-utilities\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.147693 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-catalog-content\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.147757 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9jqp\" (UniqueName: \"kubernetes.io/projected/dfb56362-7962-4e92-a7e3-b0a80648c47b-kube-api-access-g9jqp\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.147850 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-utilities\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.148340 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-catalog-content\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.148563 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-utilities\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.170521 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g9jqp\" (UniqueName: \"kubernetes.io/projected/dfb56362-7962-4e92-a7e3-b0a80648c47b-kube-api-access-g9jqp\") pod \"certified-operators-hpkrr\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") " pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.314781 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hpkrr" Sep 29 10:48:45 crc kubenswrapper[4779]: I0929 10:48:45.907329 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hpkrr"] Sep 29 10:48:46 crc kubenswrapper[4779]: I0929 10:48:46.862763 4779 generic.go:334] "Generic (PLEG): container finished" podID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerID="a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b" exitCode=0 Sep 29 10:48:46 crc kubenswrapper[4779]: I0929 10:48:46.862945 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpkrr" event={"ID":"dfb56362-7962-4e92-a7e3-b0a80648c47b","Type":"ContainerDied","Data":"a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b"} Sep 29 10:48:46 crc kubenswrapper[4779]: I0929 10:48:46.863985 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpkrr" event={"ID":"dfb56362-7962-4e92-a7e3-b0a80648c47b","Type":"ContainerStarted","Data":"281b44bef7973352f2903c94f78266cbab520dd69e2ea63c970e102c7f524c04"} Sep 29 10:48:49 crc kubenswrapper[4779]: I0929 10:48:49.894796 4779 generic.go:334] "Generic (PLEG): container finished" podID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerID="11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee" exitCode=0 Sep 29 10:48:49 crc kubenswrapper[4779]: I0929 10:48:49.895217 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpkrr" event={"ID":"dfb56362-7962-4e92-a7e3-b0a80648c47b","Type":"ContainerDied","Data":"11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee"} Sep 29 10:48:50 crc kubenswrapper[4779]: I0929 10:48:50.908983 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpkrr" event={"ID":"dfb56362-7962-4e92-a7e3-b0a80648c47b","Type":"ContainerStarted","Data":"30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300"} Sep 29 10:48:50 crc kubenswrapper[4779]: I0929 10:48:50.931450 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hpkrr" podStartSLOduration=3.488812825 podStartE2EDuration="6.931422677s" podCreationTimestamp="2025-09-29 10:48:44 +0000 UTC" firstStartedPulling="2025-09-29 10:48:46.86526916 +0000 UTC m=+4758.846593084" lastFinishedPulling="2025-09-29 10:48:50.307879022 +0000 UTC m=+4762.289202936" observedRunningTime="2025-09-29 10:48:50.926619377 +0000 UTC m=+4762.907943291" watchObservedRunningTime="2025-09-29 10:48:50.931422677 +0000 UTC m=+4762.912746581" Sep 29 10:48:51 crc kubenswrapper[4779]: I0929 10:48:51.715663 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:48:51 crc kubenswrapper[4779]: E0929 10:48:51.716771 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
Sep 29 10:48:55 crc kubenswrapper[4779]: I0929 10:48:55.315162 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hpkrr"
Sep 29 10:48:55 crc kubenswrapper[4779]: I0929 10:48:55.315863 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hpkrr"
Sep 29 10:48:55 crc kubenswrapper[4779]: I0929 10:48:55.401338 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hpkrr"
Sep 29 10:48:56 crc kubenswrapper[4779]: I0929 10:48:56.460992 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hpkrr"
Sep 29 10:48:56 crc kubenswrapper[4779]: I0929 10:48:56.752867 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hpkrr"]
Sep 29 10:48:57 crc kubenswrapper[4779]: I0929 10:48:57.991995 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hpkrr" podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerName="registry-server" containerID="cri-o://30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300" gracePeriod=2
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.515732 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hpkrr"
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.590788 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9jqp\" (UniqueName: \"kubernetes.io/projected/dfb56362-7962-4e92-a7e3-b0a80648c47b-kube-api-access-g9jqp\") pod \"dfb56362-7962-4e92-a7e3-b0a80648c47b\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") "
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.590960 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-catalog-content\") pod \"dfb56362-7962-4e92-a7e3-b0a80648c47b\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") "
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.591009 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-utilities\") pod \"dfb56362-7962-4e92-a7e3-b0a80648c47b\" (UID: \"dfb56362-7962-4e92-a7e3-b0a80648c47b\") "
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.591864 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-utilities" (OuterVolumeSpecName: "utilities") pod "dfb56362-7962-4e92-a7e3-b0a80648c47b" (UID: "dfb56362-7962-4e92-a7e3-b0a80648c47b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.598730 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfb56362-7962-4e92-a7e3-b0a80648c47b-kube-api-access-g9jqp" (OuterVolumeSpecName: "kube-api-access-g9jqp") pod "dfb56362-7962-4e92-a7e3-b0a80648c47b" (UID: "dfb56362-7962-4e92-a7e3-b0a80648c47b"). InnerVolumeSpecName "kube-api-access-g9jqp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.644227 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dfb56362-7962-4e92-a7e3-b0a80648c47b" (UID: "dfb56362-7962-4e92-a7e3-b0a80648c47b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.693436 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9jqp\" (UniqueName: \"kubernetes.io/projected/dfb56362-7962-4e92-a7e3-b0a80648c47b-kube-api-access-g9jqp\") on node \"crc\" DevicePath \"\""
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.693477 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 10:48:58 crc kubenswrapper[4779]: I0929 10:48:58.693486 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfb56362-7962-4e92-a7e3-b0a80648c47b-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.004080 4779 generic.go:334] "Generic (PLEG): container finished" podID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerID="30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300" exitCode=0
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.004164 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hpkrr"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.004176 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpkrr" event={"ID":"dfb56362-7962-4e92-a7e3-b0a80648c47b","Type":"ContainerDied","Data":"30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300"}
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.004612 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpkrr" event={"ID":"dfb56362-7962-4e92-a7e3-b0a80648c47b","Type":"ContainerDied","Data":"281b44bef7973352f2903c94f78266cbab520dd69e2ea63c970e102c7f524c04"}
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.004653 4779 scope.go:117] "RemoveContainer" containerID="30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.030192 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hpkrr"]
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.037531 4779 scope.go:117] "RemoveContainer" containerID="11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.039514 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hpkrr"]
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.069764 4779 scope.go:117] "RemoveContainer" containerID="a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.111114 4779 scope.go:117] "RemoveContainer" containerID="30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300"
Sep 29 10:48:59 crc kubenswrapper[4779]: E0929 10:48:59.111725 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300\": container with ID starting with 30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300 not found: ID does not exist" containerID="30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.111768 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300"} err="failed to get container status \"30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300\": rpc error: code = NotFound desc = could not find container \"30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300\": container with ID starting with 30c82445bec3d15baa1344a4f88dd4dab295cbb947237dadc6bc866caf3ef300 not found: ID does not exist"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.111799 4779 scope.go:117] "RemoveContainer" containerID="11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee"
Sep 29 10:48:59 crc kubenswrapper[4779]: E0929 10:48:59.112288 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee\": container with ID starting with 11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee not found: ID does not exist" containerID="11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.112345 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee"} err="failed to get container status \"11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee\": rpc error: code = NotFound desc = could not find container \"11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee\": container with ID starting with 11bd4b1d525e13d752b7e1f3b128c6238a72b468c478c9965f220ce958eafdee not found: ID does not exist"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.112382 4779 scope.go:117] "RemoveContainer" containerID="a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b"
Sep 29 10:48:59 crc kubenswrapper[4779]: E0929 10:48:59.112808 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b\": container with ID starting with a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b not found: ID does not exist" containerID="a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b"
Sep 29 10:48:59 crc kubenswrapper[4779]: I0929 10:48:59.112845 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b"} err="failed to get container status \"a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b\": rpc error: code = NotFound desc = could not find container \"a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b\": container with ID starting with a5e4a7b6b01b7df4c7b0104735bbf97d10b0b9b59b65e6d852ee13bcdaddc92b not found: ID does not exist"
Sep 29 10:49:00 crc kubenswrapper[4779]: I0929 10:49:00.727636 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" path="/var/lib/kubelet/pods/dfb56362-7962-4e92-a7e3-b0a80648c47b/volumes"
Sep 29 10:49:04 crc kubenswrapper[4779]: I0929 10:49:04.714631 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"
Sep 29 10:49:04 crc kubenswrapper[4779]: E0929 10:49:04.715761 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:49:15 crc kubenswrapper[4779]: I0929 10:49:15.714722 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"
Sep 29 10:49:15 crc kubenswrapper[4779]: E0929 10:49:15.715738 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:49:27 crc kubenswrapper[4779]: I0929 10:49:27.714685 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"
Sep 29 10:49:27 crc kubenswrapper[4779]: E0929 10:49:27.715961 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
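The "ContainerStatus from runtime service failed" / "DeleteContainer returned error" pairs above are the benign tail of pod cleanup: the containers were already removed together with the sandbox, so the follow-up status lookup gets an rpc NotFound, which is logged and then tolerated. A minimal sketch of that tolerate-NotFound pattern over a gRPC API; removeIfPresent and the remove callback are illustrative names, not kubelet or CRI-O code:

    package main

    import (
        "errors"
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeIfPresent treats NotFound as success so that cleanup stays
    // idempotent: a container that is already gone satisfies the goal.
    func removeIfPresent(remove func(id string) error, id string) error {
        if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
            return fmt.Errorf("remove container %s: %w", id, err)
        }
        return nil
    }

    func main() {
        gone := status.Error(codes.NotFound, "could not find container")
        fmt.Println(removeIfPresent(func(string) error { return gone }, "30c82445"))                     // <nil>: already deleted
        fmt.Println(removeIfPresent(func(string) error { return errors.New("runtime down") }, "30c82445")) // real failure propagates
    }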
Sep 29 10:49:40 crc kubenswrapper[4779]: I0929 10:49:40.715484 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"
Sep 29 10:49:40 crc kubenswrapper[4779]: E0929 10:49:40.716563 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:49:53 crc kubenswrapper[4779]: I0929 10:49:53.714605 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"
Sep 29 10:49:53 crc kubenswrapper[4779]: E0929 10:49:53.715548 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:50:07 crc kubenswrapper[4779]: I0929 10:50:07.715332 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"
Sep 29 10:50:07 crc kubenswrapper[4779]: E0929 10:50:07.718070 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:50:20 crc kubenswrapper[4779]: I0929 10:50:20.715175 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"
Sep 29 10:50:20 crc kubenswrapper[4779]: E0929 10:50:20.716198 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 10:50:31 crc kubenswrapper[4779]: I0929 10:50:31.714836 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6"
Sep 29 10:50:31 crc kubenswrapper[4779]: E0929 10:50:31.716127 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
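The repeating RemoveContainer / "Error syncing pod" pairs above are pod-sync retries hitting the restart back-off, not actual restart attempts; "back-off 5m0s" says the delay has reached the kubelet's cap, so the container stays down until the cap elapses (the successful restart lands below at 10:50:54). A sketch of the usual back-off ladder; the constants (10s initial delay, doubled per crash, capped at 5m) are the commonly documented kubelet defaults and should be treated as assumptions, not a contract:

    package main

    import (
        "fmt"
        "time"
    )

    // crashLoopDelay returns the wait before restart attempt n for a
    // crash-looping container, under the assumed defaults above.
    func crashLoopDelay(restarts int) time.Duration {
        const (
            initial  = 10 * time.Second
            maxDelay = 5 * time.Minute
        )
        d := initial
        for i := 1; i < restarts; i++ {
            d *= 2
            if d > maxDelay {
                return maxDelay
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 7; n++ {
            fmt.Printf("crash %d -> wait %v\n", n, crashLoopDelay(n)) // 10s 20s 40s 1m20s 2m40s 5m0s 5m0s
        }
    }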
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:50:42 crc kubenswrapper[4779]: I0929 10:50:42.715323 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:50:42 crc kubenswrapper[4779]: E0929 10:50:42.716650 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:50:53 crc kubenswrapper[4779]: I0929 10:50:53.715030 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:50:54 crc kubenswrapper[4779]: I0929 10:50:54.262058 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"6377a2fa110ce4d53ebc59182b719c7dd5068de2049c7a6be9b505163f964133"} Sep 29 10:51:19 crc kubenswrapper[4779]: I0929 10:51:19.118260 4779 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="bb5f88d2-6663-4ed6-a7a7-93ee500c9edf" containerName="galera" probeResult="failure" output="command timed out" Sep 29 10:51:19 crc kubenswrapper[4779]: I0929 10:51:19.121794 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-galera-0" podUID="bb5f88d2-6663-4ed6-a7a7-93ee500c9edf" containerName="galera" probeResult="failure" output="command timed out" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.187053 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tkjj7"] Sep 29 10:52:16 crc kubenswrapper[4779]: E0929 10:52:16.188532 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerName="extract-content" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.188552 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerName="extract-content" Sep 29 10:52:16 crc kubenswrapper[4779]: E0929 10:52:16.188574 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerName="registry-server" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.188582 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerName="registry-server" Sep 29 10:52:16 crc kubenswrapper[4779]: E0929 10:52:16.188607 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerName="extract-utilities" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.188615 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerName="extract-utilities" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.188852 4779 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="dfb56362-7962-4e92-a7e3-b0a80648c47b" containerName="registry-server" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.191083 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.207671 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkjj7"] Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.346696 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9ppt\" (UniqueName: \"kubernetes.io/projected/186581a3-be02-43fe-bec4-dee0f4abdf2a-kube-api-access-q9ppt\") pod \"redhat-marketplace-tkjj7\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.346769 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-catalog-content\") pod \"redhat-marketplace-tkjj7\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.346847 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-utilities\") pod \"redhat-marketplace-tkjj7\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.449059 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-utilities\") pod \"redhat-marketplace-tkjj7\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.449531 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9ppt\" (UniqueName: \"kubernetes.io/projected/186581a3-be02-43fe-bec4-dee0f4abdf2a-kube-api-access-q9ppt\") pod \"redhat-marketplace-tkjj7\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.449581 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-catalog-content\") pod \"redhat-marketplace-tkjj7\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.450115 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-catalog-content\") pod \"redhat-marketplace-tkjj7\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.450419 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-utilities\") pod \"redhat-marketplace-tkjj7\" (UID: 
\"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.476241 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9ppt\" (UniqueName: \"kubernetes.io/projected/186581a3-be02-43fe-bec4-dee0f4abdf2a-kube-api-access-q9ppt\") pod \"redhat-marketplace-tkjj7\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:16 crc kubenswrapper[4779]: I0929 10:52:16.524471 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:17 crc kubenswrapper[4779]: I0929 10:52:17.042841 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkjj7"] Sep 29 10:52:18 crc kubenswrapper[4779]: I0929 10:52:18.259205 4779 generic.go:334] "Generic (PLEG): container finished" podID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerID="53babb72a9302a0d6034cb649e7b09303266b3c306fc57ce1e506894f787bcb3" exitCode=0 Sep 29 10:52:18 crc kubenswrapper[4779]: I0929 10:52:18.259912 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkjj7" event={"ID":"186581a3-be02-43fe-bec4-dee0f4abdf2a","Type":"ContainerDied","Data":"53babb72a9302a0d6034cb649e7b09303266b3c306fc57ce1e506894f787bcb3"} Sep 29 10:52:18 crc kubenswrapper[4779]: I0929 10:52:18.259970 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkjj7" event={"ID":"186581a3-be02-43fe-bec4-dee0f4abdf2a","Type":"ContainerStarted","Data":"f6ae4a468582d5068f1c1073833b4e0d903c8f5ce404c0cd0091d6767fcd87da"} Sep 29 10:52:19 crc kubenswrapper[4779]: I0929 10:52:19.276331 4779 generic.go:334] "Generic (PLEG): container finished" podID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerID="35e954eaa4295eab434806cde97692364dedf63af06ebcdb54667841f94c8490" exitCode=0 Sep 29 10:52:19 crc kubenswrapper[4779]: I0929 10:52:19.276616 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkjj7" event={"ID":"186581a3-be02-43fe-bec4-dee0f4abdf2a","Type":"ContainerDied","Data":"35e954eaa4295eab434806cde97692364dedf63af06ebcdb54667841f94c8490"} Sep 29 10:52:20 crc kubenswrapper[4779]: I0929 10:52:20.293227 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkjj7" event={"ID":"186581a3-be02-43fe-bec4-dee0f4abdf2a","Type":"ContainerStarted","Data":"d30cf5a57464ef190163fd3646a536b8513e7630569ed23a8fe9d8eefc393f95"} Sep 29 10:52:20 crc kubenswrapper[4779]: I0929 10:52:20.318308 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tkjj7" podStartSLOduration=2.925852888 podStartE2EDuration="4.318286698s" podCreationTimestamp="2025-09-29 10:52:16 +0000 UTC" firstStartedPulling="2025-09-29 10:52:18.263883635 +0000 UTC m=+4970.245207539" lastFinishedPulling="2025-09-29 10:52:19.656317445 +0000 UTC m=+4971.637641349" observedRunningTime="2025-09-29 10:52:20.317538116 +0000 UTC m=+4972.298862040" watchObservedRunningTime="2025-09-29 10:52:20.318286698 +0000 UTC m=+4972.299610612" Sep 29 10:52:26 crc kubenswrapper[4779]: I0929 10:52:26.525749 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:26 crc kubenswrapper[4779]: I0929 10:52:26.526784 4779 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:26 crc kubenswrapper[4779]: I0929 10:52:26.585499 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:27 crc kubenswrapper[4779]: I0929 10:52:27.660338 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:27 crc kubenswrapper[4779]: I0929 10:52:27.709975 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkjj7"] Sep 29 10:52:29 crc kubenswrapper[4779]: I0929 10:52:29.380894 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tkjj7" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerName="registry-server" containerID="cri-o://d30cf5a57464ef190163fd3646a536b8513e7630569ed23a8fe9d8eefc393f95" gracePeriod=2 Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.395093 4779 generic.go:334] "Generic (PLEG): container finished" podID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerID="d30cf5a57464ef190163fd3646a536b8513e7630569ed23a8fe9d8eefc393f95" exitCode=0 Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.395162 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkjj7" event={"ID":"186581a3-be02-43fe-bec4-dee0f4abdf2a","Type":"ContainerDied","Data":"d30cf5a57464ef190163fd3646a536b8513e7630569ed23a8fe9d8eefc393f95"} Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.395524 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkjj7" event={"ID":"186581a3-be02-43fe-bec4-dee0f4abdf2a","Type":"ContainerDied","Data":"f6ae4a468582d5068f1c1073833b4e0d903c8f5ce404c0cd0091d6767fcd87da"} Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.395546 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6ae4a468582d5068f1c1073833b4e0d903c8f5ce404c0cd0091d6767fcd87da" Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.564636 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.659451 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9ppt\" (UniqueName: \"kubernetes.io/projected/186581a3-be02-43fe-bec4-dee0f4abdf2a-kube-api-access-q9ppt\") pod \"186581a3-be02-43fe-bec4-dee0f4abdf2a\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.659518 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-catalog-content\") pod \"186581a3-be02-43fe-bec4-dee0f4abdf2a\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.659870 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-utilities\") pod \"186581a3-be02-43fe-bec4-dee0f4abdf2a\" (UID: \"186581a3-be02-43fe-bec4-dee0f4abdf2a\") " Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.660973 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-utilities" (OuterVolumeSpecName: "utilities") pod "186581a3-be02-43fe-bec4-dee0f4abdf2a" (UID: "186581a3-be02-43fe-bec4-dee0f4abdf2a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.676649 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "186581a3-be02-43fe-bec4-dee0f4abdf2a" (UID: "186581a3-be02-43fe-bec4-dee0f4abdf2a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.677964 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/186581a3-be02-43fe-bec4-dee0f4abdf2a-kube-api-access-q9ppt" (OuterVolumeSpecName: "kube-api-access-q9ppt") pod "186581a3-be02-43fe-bec4-dee0f4abdf2a" (UID: "186581a3-be02-43fe-bec4-dee0f4abdf2a"). InnerVolumeSpecName "kube-api-access-q9ppt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.763486 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.763538 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9ppt\" (UniqueName: \"kubernetes.io/projected/186581a3-be02-43fe-bec4-dee0f4abdf2a-kube-api-access-q9ppt\") on node \"crc\" DevicePath \"\"" Sep 29 10:52:30 crc kubenswrapper[4779]: I0929 10:52:30.763554 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186581a3-be02-43fe-bec4-dee0f4abdf2a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:52:31 crc kubenswrapper[4779]: I0929 10:52:31.406998 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkjj7" Sep 29 10:52:31 crc kubenswrapper[4779]: I0929 10:52:31.454109 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkjj7"] Sep 29 10:52:31 crc kubenswrapper[4779]: I0929 10:52:31.466821 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkjj7"] Sep 29 10:52:32 crc kubenswrapper[4779]: I0929 10:52:32.728792 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" path="/var/lib/kubelet/pods/186581a3-be02-43fe-bec4-dee0f4abdf2a/volumes" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.348638 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nrhxs"] Sep 29 10:53:06 crc kubenswrapper[4779]: E0929 10:53:06.350063 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerName="extract-utilities" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.350081 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerName="extract-utilities" Sep 29 10:53:06 crc kubenswrapper[4779]: E0929 10:53:06.350090 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerName="extract-content" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.350097 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerName="extract-content" Sep 29 10:53:06 crc kubenswrapper[4779]: E0929 10:53:06.350131 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerName="registry-server" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.350138 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerName="registry-server" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.350334 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="186581a3-be02-43fe-bec4-dee0f4abdf2a" containerName="registry-server" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.352170 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.364636 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nrhxs"] Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.506400 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-catalog-content\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.506496 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf4zk\" (UniqueName: \"kubernetes.io/projected/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-kube-api-access-bf4zk\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.506595 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-utilities\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.609634 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-utilities\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.609807 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-catalog-content\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.609846 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf4zk\" (UniqueName: \"kubernetes.io/projected/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-kube-api-access-bf4zk\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.610436 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-utilities\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.610583 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-catalog-content\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.644591 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bf4zk\" (UniqueName: \"kubernetes.io/projected/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-kube-api-access-bf4zk\") pod \"redhat-operators-nrhxs\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:06 crc kubenswrapper[4779]: I0929 10:53:06.715007 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:07 crc kubenswrapper[4779]: I0929 10:53:07.238370 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nrhxs"] Sep 29 10:53:07 crc kubenswrapper[4779]: I0929 10:53:07.777691 4779 generic.go:334] "Generic (PLEG): container finished" podID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerID="254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76" exitCode=0 Sep 29 10:53:07 crc kubenswrapper[4779]: I0929 10:53:07.777805 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nrhxs" event={"ID":"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30","Type":"ContainerDied","Data":"254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76"} Sep 29 10:53:07 crc kubenswrapper[4779]: I0929 10:53:07.778211 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nrhxs" event={"ID":"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30","Type":"ContainerStarted","Data":"e2801bbf827dc38485a19ebcb12debb43c815e9639148909f92eab6f2cd8511a"} Sep 29 10:53:09 crc kubenswrapper[4779]: I0929 10:53:09.803506 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nrhxs" event={"ID":"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30","Type":"ContainerStarted","Data":"c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72"} Sep 29 10:53:14 crc kubenswrapper[4779]: I0929 10:53:14.858969 4779 generic.go:334] "Generic (PLEG): container finished" podID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerID="c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72" exitCode=0 Sep 29 10:53:14 crc kubenswrapper[4779]: I0929 10:53:14.859000 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nrhxs" event={"ID":"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30","Type":"ContainerDied","Data":"c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72"} Sep 29 10:53:15 crc kubenswrapper[4779]: I0929 10:53:15.873979 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nrhxs" event={"ID":"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30","Type":"ContainerStarted","Data":"d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47"} Sep 29 10:53:15 crc kubenswrapper[4779]: I0929 10:53:15.909826 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nrhxs" podStartSLOduration=2.382998162 podStartE2EDuration="9.909800593s" podCreationTimestamp="2025-09-29 10:53:06 +0000 UTC" firstStartedPulling="2025-09-29 10:53:07.780137545 +0000 UTC m=+5019.761461459" lastFinishedPulling="2025-09-29 10:53:15.306939946 +0000 UTC m=+5027.288263890" observedRunningTime="2025-09-29 10:53:15.897627989 +0000 UTC m=+5027.878951893" watchObservedRunningTime="2025-09-29 10:53:15.909800593 +0000 UTC m=+5027.891124497" Sep 29 10:53:16 crc kubenswrapper[4779]: I0929 10:53:16.725054 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 
10:53:16 crc kubenswrapper[4779]: I0929 10:53:16.725522 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:16 crc kubenswrapper[4779]: I0929 10:53:16.966698 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:53:16 crc kubenswrapper[4779]: I0929 10:53:16.966776 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:53:17 crc kubenswrapper[4779]: I0929 10:53:17.763423 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nrhxs" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="registry-server" probeResult="failure" output=< Sep 29 10:53:17 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s Sep 29 10:53:17 crc kubenswrapper[4779]: > Sep 29 10:53:26 crc kubenswrapper[4779]: I0929 10:53:26.770984 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:26 crc kubenswrapper[4779]: I0929 10:53:26.823855 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:27 crc kubenswrapper[4779]: I0929 10:53:27.015177 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nrhxs"] Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.024939 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nrhxs" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="registry-server" containerID="cri-o://d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47" gracePeriod=2 Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.538158 4779 util.go:48] "No ready sandbox for pod can be found. 
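The Startup probe failure above, with its multi-line output ("timeout: failed to connect service \":50051\" within 1s"), is the registry pod's gRPC health check: these marketplace registry-server containers serve the standard grpc.health.v1 Health service on port 50051, and the probe gives up after one second while the freshly started process is still loading its catalog, which is why the probe flips to "started" a few seconds later. A minimal client performing the equivalent check; plaintext gRPC is assumed, and the address and 1s timeout mirror the log rather than any pod spec shown here:

    package main

    import (
        "context"
        "fmt"
        "time"

        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"
        healthpb "google.golang.org/grpc/health/grpc_health_v1"
    )

    func main() {
        // Mirror the probe's "within 1s" deadline.
        ctx, cancel := context.WithTimeout(context.Background(), time.Second)
        defer cancel()

        conn, err := grpc.DialContext(ctx, "127.0.0.1:50051",
            grpc.WithTransportCredentials(insecure.NewCredentials()),
            grpc.WithBlock()) // fail fast if nothing is listening yet
        if err != nil {
            fmt.Println("probe failed:", err) // the failure the kubelet reports above
            return
        }
        defer conn.Close()

        // Empty Service name asks about the server's overall health.
        resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
        if err != nil {
            fmt.Println("probe failed:", err)
            return
        }
        fmt.Println("status:", resp.GetStatus()) // SERVING once the catalog is loaded
    }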
Need to start a new one" pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.702896 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-utilities\") pod \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.703641 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-catalog-content\") pod \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.703694 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf4zk\" (UniqueName: \"kubernetes.io/projected/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-kube-api-access-bf4zk\") pod \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\" (UID: \"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30\") " Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.704453 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-utilities" (OuterVolumeSpecName: "utilities") pod "d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" (UID: "d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.713312 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-kube-api-access-bf4zk" (OuterVolumeSpecName: "kube-api-access-bf4zk") pod "d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" (UID: "d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30"). InnerVolumeSpecName "kube-api-access-bf4zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.807354 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.807438 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf4zk\" (UniqueName: \"kubernetes.io/projected/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-kube-api-access-bf4zk\") on node \"crc\" DevicePath \"\"" Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.833429 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" (UID: "d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:53:28 crc kubenswrapper[4779]: I0929 10:53:28.915002 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.043168 4779 generic.go:334] "Generic (PLEG): container finished" podID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerID="d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47" exitCode=0 Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.043222 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nrhxs" event={"ID":"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30","Type":"ContainerDied","Data":"d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47"} Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.043255 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nrhxs" event={"ID":"d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30","Type":"ContainerDied","Data":"e2801bbf827dc38485a19ebcb12debb43c815e9639148909f92eab6f2cd8511a"} Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.043277 4779 scope.go:117] "RemoveContainer" containerID="d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.043278 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nrhxs" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.080468 4779 scope.go:117] "RemoveContainer" containerID="c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.093137 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nrhxs"] Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.106549 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nrhxs"] Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.118490 4779 scope.go:117] "RemoveContainer" containerID="254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.190667 4779 scope.go:117] "RemoveContainer" containerID="d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47" Sep 29 10:53:29 crc kubenswrapper[4779]: E0929 10:53:29.191424 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47\": container with ID starting with d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47 not found: ID does not exist" containerID="d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.191496 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47"} err="failed to get container status \"d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47\": rpc error: code = NotFound desc = could not find container \"d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47\": container with ID starting with d11e8379ac4cd6553148fbbb00f00c06c2d4b64b0ec0d5bb4e8fc39401557d47 not found: ID does not exist" Sep 29 10:53:29 crc 
kubenswrapper[4779]: I0929 10:53:29.191541 4779 scope.go:117] "RemoveContainer" containerID="c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72" Sep 29 10:53:29 crc kubenswrapper[4779]: E0929 10:53:29.191868 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72\": container with ID starting with c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72 not found: ID does not exist" containerID="c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.191925 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72"} err="failed to get container status \"c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72\": rpc error: code = NotFound desc = could not find container \"c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72\": container with ID starting with c0173b8c9b18879f856ec75d4ab826b7669436a40462e339806e803ddc940b72 not found: ID does not exist" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.191960 4779 scope.go:117] "RemoveContainer" containerID="254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76" Sep 29 10:53:29 crc kubenswrapper[4779]: E0929 10:53:29.192762 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76\": container with ID starting with 254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76 not found: ID does not exist" containerID="254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76" Sep 29 10:53:29 crc kubenswrapper[4779]: I0929 10:53:29.192797 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76"} err="failed to get container status \"254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76\": rpc error: code = NotFound desc = could not find container \"254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76\": container with ID starting with 254b09e75703e74362008e004053c986242c94eaa6f88575fbd0b93efcbe5d76 not found: ID does not exist" Sep 29 10:53:30 crc kubenswrapper[4779]: I0929 10:53:30.741959 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" path="/var/lib/kubelet/pods/d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30/volumes" Sep 29 10:53:46 crc kubenswrapper[4779]: I0929 10:53:46.966349 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:53:46 crc kubenswrapper[4779]: I0929 10:53:46.967009 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:54:16 crc kubenswrapper[4779]: I0929 10:54:16.966894 4779 patch_prober.go:28] interesting 
pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:54:16 crc kubenswrapper[4779]: I0929 10:54:16.967417 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:54:16 crc kubenswrapper[4779]: I0929 10:54:16.967468 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 10:54:16 crc kubenswrapper[4779]: I0929 10:54:16.969784 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6377a2fa110ce4d53ebc59182b719c7dd5068de2049c7a6be9b505163f964133"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 10:54:16 crc kubenswrapper[4779]: I0929 10:54:16.969962 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://6377a2fa110ce4d53ebc59182b719c7dd5068de2049c7a6be9b505163f964133" gracePeriod=600 Sep 29 10:54:17 crc kubenswrapper[4779]: I0929 10:54:17.562016 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="6377a2fa110ce4d53ebc59182b719c7dd5068de2049c7a6be9b505163f964133" exitCode=0 Sep 29 10:54:17 crc kubenswrapper[4779]: I0929 10:54:17.562088 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"6377a2fa110ce4d53ebc59182b719c7dd5068de2049c7a6be9b505163f964133"} Sep 29 10:54:17 crc kubenswrapper[4779]: I0929 10:54:17.562576 4779 scope.go:117] "RemoveContainer" containerID="1865f24d23363df0a5c4badf3f6f96c7f06b8d41471c03b2b2693d9d64bf4ee6" Sep 29 10:54:18 crc kubenswrapper[4779]: I0929 10:54:18.577207 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"} Sep 29 10:56:46 crc kubenswrapper[4779]: I0929 10:56:46.965961 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:56:46 crc kubenswrapper[4779]: I0929 10:56:46.966846 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:57:16 
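The machine-config-daemon liveness failures above and below are plain HTTP: the kubelet GETs http://127.0.0.1:8798/health, and "connection refused" means nothing is listening on the port at all (the process is down or wedged), not that a handler returned an error. After enough consecutive failures the kubelet kills and restarts the container, as happens at 10:57:46 below. The contract such a probe expects is only a 2xx response; a hypothetical handler for illustration, not the machine-config-daemon's actual code:

    package main

    import (
        "log"
        "net/http"
    )

    func main() {
        // Any 2xx keeps an HTTP liveness probe happy. "connection refused"
        // in the log above means this listener was not running at all.
        http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
            w.WriteHeader(http.StatusOK)
        })
        log.Fatal(http.ListenAndServe("127.0.0.1:8798", nil))
    }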
crc kubenswrapper[4779]: I0929 10:57:16.966608 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:57:16 crc kubenswrapper[4779]: I0929 10:57:16.967463 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:57:46 crc kubenswrapper[4779]: I0929 10:57:46.966848 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 10:57:46 crc kubenswrapper[4779]: I0929 10:57:46.967600 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 10:57:46 crc kubenswrapper[4779]: I0929 10:57:46.967672 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 10:57:46 crc kubenswrapper[4779]: I0929 10:57:46.969001 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 10:57:46 crc kubenswrapper[4779]: I0929 10:57:46.969086 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" gracePeriod=600 Sep 29 10:57:47 crc kubenswrapper[4779]: E0929 10:57:47.098762 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:57:47 crc kubenswrapper[4779]: I0929 10:57:47.944555 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" exitCode=0 Sep 29 10:57:47 crc kubenswrapper[4779]: I0929 10:57:47.944646 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" 
event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"} Sep 29 10:57:47 crc kubenswrapper[4779]: I0929 10:57:47.944704 4779 scope.go:117] "RemoveContainer" containerID="6377a2fa110ce4d53ebc59182b719c7dd5068de2049c7a6be9b505163f964133" Sep 29 10:57:47 crc kubenswrapper[4779]: I0929 10:57:47.945666 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:57:47 crc kubenswrapper[4779]: E0929 10:57:47.946160 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:57:55 crc kubenswrapper[4779]: E0929 10:57:55.334680 4779 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.30:36884->38.102.83.30:34385: write tcp 38.102.83.30:36884->38.102.83.30:34385: write: broken pipe Sep 29 10:57:59 crc kubenswrapper[4779]: I0929 10:57:59.714932 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:57:59 crc kubenswrapper[4779]: E0929 10:57:59.715972 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:58:10 crc kubenswrapper[4779]: I0929 10:58:10.715042 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:58:10 crc kubenswrapper[4779]: E0929 10:58:10.716018 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:58:24 crc kubenswrapper[4779]: I0929 10:58:24.714863 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:58:24 crc kubenswrapper[4779]: E0929 10:58:24.716138 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:58:35 crc kubenswrapper[4779]: I0929 10:58:35.715559 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:58:35 crc kubenswrapper[4779]: E0929 10:58:35.716315 4779 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:58:50 crc kubenswrapper[4779]: I0929 10:58:50.715271 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:58:50 crc kubenswrapper[4779]: E0929 10:58:50.716483 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:58:51 crc kubenswrapper[4779]: I0929 10:58:51.205312 4779 scope.go:117] "RemoveContainer" containerID="35e954eaa4295eab434806cde97692364dedf63af06ebcdb54667841f94c8490" Sep 29 10:58:51 crc kubenswrapper[4779]: I0929 10:58:51.250979 4779 scope.go:117] "RemoveContainer" containerID="d30cf5a57464ef190163fd3646a536b8513e7630569ed23a8fe9d8eefc393f95" Sep 29 10:58:51 crc kubenswrapper[4779]: I0929 10:58:51.305979 4779 scope.go:117] "RemoveContainer" containerID="53babb72a9302a0d6034cb649e7b09303266b3c306fc57ce1e506894f787bcb3" Sep 29 10:59:03 crc kubenswrapper[4779]: I0929 10:59:03.990666 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-chtv8"] Sep 29 10:59:03 crc kubenswrapper[4779]: E0929 10:59:03.992062 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="extract-content" Sep 29 10:59:03 crc kubenswrapper[4779]: I0929 10:59:03.992080 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="extract-content" Sep 29 10:59:03 crc kubenswrapper[4779]: E0929 10:59:03.992104 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="registry-server" Sep 29 10:59:03 crc kubenswrapper[4779]: I0929 10:59:03.992110 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="registry-server" Sep 29 10:59:03 crc kubenswrapper[4779]: E0929 10:59:03.992122 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="extract-utilities" Sep 29 10:59:03 crc kubenswrapper[4779]: I0929 10:59:03.992128 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="extract-utilities" Sep 29 10:59:03 crc kubenswrapper[4779]: I0929 10:59:03.992328 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1fc6aa4-b4c0-4dca-8d26-a4662ba35e30" containerName="registry-server" Sep 29 10:59:03 crc kubenswrapper[4779]: I0929 10:59:03.994536 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.005392 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-chtv8"] Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.121409 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-catalog-content\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.122920 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-utilities\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.122989 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff8wr\" (UniqueName: \"kubernetes.io/projected/0e6abe32-8486-4bd0-8478-32dcdbb60044-kube-api-access-ff8wr\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.226294 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-catalog-content\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.226431 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-utilities\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.226477 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff8wr\" (UniqueName: \"kubernetes.io/projected/0e6abe32-8486-4bd0-8478-32dcdbb60044-kube-api-access-ff8wr\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.226836 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-catalog-content\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.227227 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-utilities\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.262809 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ff8wr\" (UniqueName: \"kubernetes.io/projected/0e6abe32-8486-4bd0-8478-32dcdbb60044-kube-api-access-ff8wr\") pod \"certified-operators-chtv8\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.328806 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:04 crc kubenswrapper[4779]: I0929 10:59:04.917998 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-chtv8"] Sep 29 10:59:05 crc kubenswrapper[4779]: I0929 10:59:05.714806 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:59:05 crc kubenswrapper[4779]: E0929 10:59:05.715973 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:59:05 crc kubenswrapper[4779]: I0929 10:59:05.777750 4779 generic.go:334] "Generic (PLEG): container finished" podID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerID="c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f" exitCode=0 Sep 29 10:59:05 crc kubenswrapper[4779]: I0929 10:59:05.777817 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chtv8" event={"ID":"0e6abe32-8486-4bd0-8478-32dcdbb60044","Type":"ContainerDied","Data":"c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f"} Sep 29 10:59:05 crc kubenswrapper[4779]: I0929 10:59:05.777871 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chtv8" event={"ID":"0e6abe32-8486-4bd0-8478-32dcdbb60044","Type":"ContainerStarted","Data":"0f39d01509958cd3159428df48fc36d6628c371bc5a5fa6d7ac4e4831fd82c68"} Sep 29 10:59:05 crc kubenswrapper[4779]: I0929 10:59:05.781489 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 10:59:07 crc kubenswrapper[4779]: I0929 10:59:07.798528 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chtv8" event={"ID":"0e6abe32-8486-4bd0-8478-32dcdbb60044","Type":"ContainerStarted","Data":"8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa"} Sep 29 10:59:08 crc kubenswrapper[4779]: I0929 10:59:08.824493 4779 generic.go:334] "Generic (PLEG): container finished" podID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerID="8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa" exitCode=0 Sep 29 10:59:08 crc kubenswrapper[4779]: I0929 10:59:08.824575 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chtv8" event={"ID":"0e6abe32-8486-4bd0-8478-32dcdbb60044","Type":"ContainerDied","Data":"8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa"} Sep 29 10:59:09 crc kubenswrapper[4779]: I0929 10:59:09.847174 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chtv8" 
event={"ID":"0e6abe32-8486-4bd0-8478-32dcdbb60044","Type":"ContainerStarted","Data":"649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0"} Sep 29 10:59:09 crc kubenswrapper[4779]: I0929 10:59:09.880690 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-chtv8" podStartSLOduration=3.333758938 podStartE2EDuration="6.880660286s" podCreationTimestamp="2025-09-29 10:59:03 +0000 UTC" firstStartedPulling="2025-09-29 10:59:05.781206835 +0000 UTC m=+5377.762530739" lastFinishedPulling="2025-09-29 10:59:09.328108183 +0000 UTC m=+5381.309432087" observedRunningTime="2025-09-29 10:59:09.87116375 +0000 UTC m=+5381.852487654" watchObservedRunningTime="2025-09-29 10:59:09.880660286 +0000 UTC m=+5381.861984190" Sep 29 10:59:14 crc kubenswrapper[4779]: I0929 10:59:14.329922 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:14 crc kubenswrapper[4779]: I0929 10:59:14.330573 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:14 crc kubenswrapper[4779]: I0929 10:59:14.385106 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:14 crc kubenswrapper[4779]: I0929 10:59:14.969015 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:15 crc kubenswrapper[4779]: I0929 10:59:15.030269 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-chtv8"] Sep 29 10:59:16 crc kubenswrapper[4779]: I0929 10:59:16.923059 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-chtv8" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerName="registry-server" containerID="cri-o://649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0" gracePeriod=2 Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.470710 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.578847 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-utilities\") pod \"0e6abe32-8486-4bd0-8478-32dcdbb60044\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.579091 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-catalog-content\") pod \"0e6abe32-8486-4bd0-8478-32dcdbb60044\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.579339 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff8wr\" (UniqueName: \"kubernetes.io/projected/0e6abe32-8486-4bd0-8478-32dcdbb60044-kube-api-access-ff8wr\") pod \"0e6abe32-8486-4bd0-8478-32dcdbb60044\" (UID: \"0e6abe32-8486-4bd0-8478-32dcdbb60044\") " Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.580761 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-utilities" (OuterVolumeSpecName: "utilities") pod "0e6abe32-8486-4bd0-8478-32dcdbb60044" (UID: "0e6abe32-8486-4bd0-8478-32dcdbb60044"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.631398 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e6abe32-8486-4bd0-8478-32dcdbb60044" (UID: "0e6abe32-8486-4bd0-8478-32dcdbb60044"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.683244 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.683286 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6abe32-8486-4bd0-8478-32dcdbb60044-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.935624 4779 generic.go:334] "Generic (PLEG): container finished" podID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerID="649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0" exitCode=0 Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.935690 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chtv8" event={"ID":"0e6abe32-8486-4bd0-8478-32dcdbb60044","Type":"ContainerDied","Data":"649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0"} Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.935745 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chtv8" event={"ID":"0e6abe32-8486-4bd0-8478-32dcdbb60044","Type":"ContainerDied","Data":"0f39d01509958cd3159428df48fc36d6628c371bc5a5fa6d7ac4e4831fd82c68"} Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.935761 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-chtv8" Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.935774 4779 scope.go:117] "RemoveContainer" containerID="649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0" Sep 29 10:59:17 crc kubenswrapper[4779]: I0929 10:59:17.963416 4779 scope.go:117] "RemoveContainer" containerID="8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.316248 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e6abe32-8486-4bd0-8478-32dcdbb60044-kube-api-access-ff8wr" (OuterVolumeSpecName: "kube-api-access-ff8wr") pod "0e6abe32-8486-4bd0-8478-32dcdbb60044" (UID: "0e6abe32-8486-4bd0-8478-32dcdbb60044"). InnerVolumeSpecName "kube-api-access-ff8wr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.335959 4779 scope.go:117] "RemoveContainer" containerID="c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.397411 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff8wr\" (UniqueName: \"kubernetes.io/projected/0e6abe32-8486-4bd0-8478-32dcdbb60044-kube-api-access-ff8wr\") on node \"crc\" DevicePath \"\"" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.570306 4779 scope.go:117] "RemoveContainer" containerID="649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0" Sep 29 10:59:18 crc kubenswrapper[4779]: E0929 10:59:18.570990 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0\": container with ID starting with 649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0 not found: ID does not exist" containerID="649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.571049 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0"} err="failed to get container status \"649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0\": rpc error: code = NotFound desc = could not find container \"649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0\": container with ID starting with 649f78171017ef400bab634d28fe1d27c03d8da669fbadd95e07bbafb50c02c0 not found: ID does not exist" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.571084 4779 scope.go:117] "RemoveContainer" containerID="8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa" Sep 29 10:59:18 crc kubenswrapper[4779]: E0929 10:59:18.571510 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa\": container with ID starting with 8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa not found: ID does not exist" containerID="8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.571548 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa"} err="failed to get container status \"8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa\": rpc error: code = NotFound desc = could not find container \"8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa\": container with ID starting with 8f9927834483432a143417fb858757e02a4684b4728d52b403306f03bf13fbfa not found: ID does not exist" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.571569 4779 scope.go:117] "RemoveContainer" containerID="c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f" Sep 29 10:59:18 crc kubenswrapper[4779]: E0929 10:59:18.571841 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f\": container with ID starting with c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f not found: ID does not 
exist" containerID="c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.571872 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f"} err="failed to get container status \"c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f\": rpc error: code = NotFound desc = could not find container \"c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f\": container with ID starting with c2f25cee7ed00d2bc1d0640e1ba5b6dad88535572ccf911a8ea57d7fad692e0f not found: ID does not exist" Sep 29 10:59:18 crc kubenswrapper[4779]: E0929 10:59:18.704149 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e6abe32_8486_4bd0_8478_32dcdbb60044.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e6abe32_8486_4bd0_8478_32dcdbb60044.slice/crio-0f39d01509958cd3159428df48fc36d6628c371bc5a5fa6d7ac4e4831fd82c68\": RecentStats: unable to find data in memory cache]" Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.705448 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-chtv8"] Sep 29 10:59:18 crc kubenswrapper[4779]: I0929 10:59:18.732691 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-chtv8"] Sep 29 10:59:19 crc kubenswrapper[4779]: I0929 10:59:19.715305 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:59:19 crc kubenswrapper[4779]: E0929 10:59:19.716287 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:59:20 crc kubenswrapper[4779]: I0929 10:59:20.727945 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" path="/var/lib/kubelet/pods/0e6abe32-8486-4bd0-8478-32dcdbb60044/volumes" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.272966 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pvdk9"] Sep 29 10:59:31 crc kubenswrapper[4779]: E0929 10:59:31.274298 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerName="registry-server" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.274319 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerName="registry-server" Sep 29 10:59:31 crc kubenswrapper[4779]: E0929 10:59:31.274342 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerName="extract-utilities" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.274350 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerName="extract-utilities" Sep 29 10:59:31 crc kubenswrapper[4779]: E0929 10:59:31.274385 4779 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerName="extract-content" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.274395 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerName="extract-content" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.274681 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6abe32-8486-4bd0-8478-32dcdbb60044" containerName="registry-server" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.276823 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.283236 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvdk9"] Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.325450 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-utilities\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.325571 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmpkq\" (UniqueName: \"kubernetes.io/projected/e804ce93-8166-4b5c-a18e-728721cbc081-kube-api-access-cmpkq\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.325666 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-catalog-content\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.427309 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmpkq\" (UniqueName: \"kubernetes.io/projected/e804ce93-8166-4b5c-a18e-728721cbc081-kube-api-access-cmpkq\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.427499 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-catalog-content\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.427605 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-utilities\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.428268 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-catalog-content\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.428323 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-utilities\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.455318 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmpkq\" (UniqueName: \"kubernetes.io/projected/e804ce93-8166-4b5c-a18e-728721cbc081-kube-api-access-cmpkq\") pod \"community-operators-pvdk9\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.611027 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:31 crc kubenswrapper[4779]: I0929 10:59:31.716317 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:59:31 crc kubenswrapper[4779]: E0929 10:59:31.716648 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:59:32 crc kubenswrapper[4779]: I0929 10:59:32.222537 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvdk9"] Sep 29 10:59:33 crc kubenswrapper[4779]: I0929 10:59:33.117918 4779 generic.go:334] "Generic (PLEG): container finished" podID="e804ce93-8166-4b5c-a18e-728721cbc081" containerID="326ef98fd6850861dcd8c024861bca4a8dd5c5d055facc39066de252161d83aa" exitCode=0 Sep 29 10:59:33 crc kubenswrapper[4779]: I0929 10:59:33.118350 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdk9" event={"ID":"e804ce93-8166-4b5c-a18e-728721cbc081","Type":"ContainerDied","Data":"326ef98fd6850861dcd8c024861bca4a8dd5c5d055facc39066de252161d83aa"} Sep 29 10:59:33 crc kubenswrapper[4779]: I0929 10:59:33.118442 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdk9" event={"ID":"e804ce93-8166-4b5c-a18e-728721cbc081","Type":"ContainerStarted","Data":"a93b0b543e7efce55a75b977b35eb1503a76f8c8e300c62534f0e6fefc50c0b0"} Sep 29 10:59:34 crc kubenswrapper[4779]: I0929 10:59:34.136409 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdk9" event={"ID":"e804ce93-8166-4b5c-a18e-728721cbc081","Type":"ContainerStarted","Data":"357cf713f3e016aaa8de9fa149b4dac06e445aadaaf0c88128c4fa0a6eeca9ac"} Sep 29 10:59:35 crc kubenswrapper[4779]: I0929 10:59:35.149918 4779 generic.go:334] "Generic (PLEG): container finished" podID="e804ce93-8166-4b5c-a18e-728721cbc081" containerID="357cf713f3e016aaa8de9fa149b4dac06e445aadaaf0c88128c4fa0a6eeca9ac" exitCode=0 Sep 29 10:59:35 crc 
kubenswrapper[4779]: I0929 10:59:35.150026 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdk9" event={"ID":"e804ce93-8166-4b5c-a18e-728721cbc081","Type":"ContainerDied","Data":"357cf713f3e016aaa8de9fa149b4dac06e445aadaaf0c88128c4fa0a6eeca9ac"} Sep 29 10:59:36 crc kubenswrapper[4779]: I0929 10:59:36.177690 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdk9" event={"ID":"e804ce93-8166-4b5c-a18e-728721cbc081","Type":"ContainerStarted","Data":"8d9eda50969c1ff9031599d99cad3d4a6a7344d286512832e9420b198c2b7b1e"} Sep 29 10:59:36 crc kubenswrapper[4779]: I0929 10:59:36.206760 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pvdk9" podStartSLOduration=2.6503444099999998 podStartE2EDuration="5.206732765s" podCreationTimestamp="2025-09-29 10:59:31 +0000 UTC" firstStartedPulling="2025-09-29 10:59:33.120853118 +0000 UTC m=+5405.102177022" lastFinishedPulling="2025-09-29 10:59:35.677241473 +0000 UTC m=+5407.658565377" observedRunningTime="2025-09-29 10:59:36.201941676 +0000 UTC m=+5408.183265590" watchObservedRunningTime="2025-09-29 10:59:36.206732765 +0000 UTC m=+5408.188056669" Sep 29 10:59:41 crc kubenswrapper[4779]: I0929 10:59:41.611362 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:41 crc kubenswrapper[4779]: I0929 10:59:41.612194 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:41 crc kubenswrapper[4779]: I0929 10:59:41.672395 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:42 crc kubenswrapper[4779]: I0929 10:59:42.299995 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:42 crc kubenswrapper[4779]: I0929 10:59:42.349363 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvdk9"] Sep 29 10:59:42 crc kubenswrapper[4779]: I0929 10:59:42.715318 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:59:42 crc kubenswrapper[4779]: E0929 10:59:42.715702 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 10:59:44 crc kubenswrapper[4779]: I0929 10:59:44.276368 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pvdk9" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" containerName="registry-server" containerID="cri-o://8d9eda50969c1ff9031599d99cad3d4a6a7344d286512832e9420b198c2b7b1e" gracePeriod=2 Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.290094 4779 generic.go:334] "Generic (PLEG): container finished" podID="e804ce93-8166-4b5c-a18e-728721cbc081" containerID="8d9eda50969c1ff9031599d99cad3d4a6a7344d286512832e9420b198c2b7b1e" exitCode=0 Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 
10:59:45.290399 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdk9" event={"ID":"e804ce93-8166-4b5c-a18e-728721cbc081","Type":"ContainerDied","Data":"8d9eda50969c1ff9031599d99cad3d4a6a7344d286512832e9420b198c2b7b1e"} Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.517737 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.619581 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-utilities\") pod \"e804ce93-8166-4b5c-a18e-728721cbc081\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.620333 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-catalog-content\") pod \"e804ce93-8166-4b5c-a18e-728721cbc081\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.620613 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmpkq\" (UniqueName: \"kubernetes.io/projected/e804ce93-8166-4b5c-a18e-728721cbc081-kube-api-access-cmpkq\") pod \"e804ce93-8166-4b5c-a18e-728721cbc081\" (UID: \"e804ce93-8166-4b5c-a18e-728721cbc081\") " Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.621138 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-utilities" (OuterVolumeSpecName: "utilities") pod "e804ce93-8166-4b5c-a18e-728721cbc081" (UID: "e804ce93-8166-4b5c-a18e-728721cbc081"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.621301 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.636048 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e804ce93-8166-4b5c-a18e-728721cbc081-kube-api-access-cmpkq" (OuterVolumeSpecName: "kube-api-access-cmpkq") pod "e804ce93-8166-4b5c-a18e-728721cbc081" (UID: "e804ce93-8166-4b5c-a18e-728721cbc081"). InnerVolumeSpecName "kube-api-access-cmpkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.681457 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e804ce93-8166-4b5c-a18e-728721cbc081" (UID: "e804ce93-8166-4b5c-a18e-728721cbc081"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.723622 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e804ce93-8166-4b5c-a18e-728721cbc081-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 10:59:45 crc kubenswrapper[4779]: I0929 10:59:45.723672 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmpkq\" (UniqueName: \"kubernetes.io/projected/e804ce93-8166-4b5c-a18e-728721cbc081-kube-api-access-cmpkq\") on node \"crc\" DevicePath \"\"" Sep 29 10:59:46 crc kubenswrapper[4779]: I0929 10:59:46.304378 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdk9" event={"ID":"e804ce93-8166-4b5c-a18e-728721cbc081","Type":"ContainerDied","Data":"a93b0b543e7efce55a75b977b35eb1503a76f8c8e300c62534f0e6fefc50c0b0"} Sep 29 10:59:46 crc kubenswrapper[4779]: I0929 10:59:46.304457 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvdk9" Sep 29 10:59:46 crc kubenswrapper[4779]: I0929 10:59:46.304518 4779 scope.go:117] "RemoveContainer" containerID="8d9eda50969c1ff9031599d99cad3d4a6a7344d286512832e9420b198c2b7b1e" Sep 29 10:59:46 crc kubenswrapper[4779]: I0929 10:59:46.335735 4779 scope.go:117] "RemoveContainer" containerID="357cf713f3e016aaa8de9fa149b4dac06e445aadaaf0c88128c4fa0a6eeca9ac" Sep 29 10:59:46 crc kubenswrapper[4779]: I0929 10:59:46.356195 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvdk9"] Sep 29 10:59:46 crc kubenswrapper[4779]: I0929 10:59:46.365272 4779 scope.go:117] "RemoveContainer" containerID="326ef98fd6850861dcd8c024861bca4a8dd5c5d055facc39066de252161d83aa" Sep 29 10:59:46 crc kubenswrapper[4779]: I0929 10:59:46.366773 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pvdk9"] Sep 29 10:59:46 crc kubenswrapper[4779]: I0929 10:59:46.728449 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" path="/var/lib/kubelet/pods/e804ce93-8166-4b5c-a18e-728721cbc081/volumes" Sep 29 10:59:53 crc kubenswrapper[4779]: I0929 10:59:53.714669 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 10:59:53 crc kubenswrapper[4779]: E0929 10:59:53.715521 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.152000 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"] Sep 29 11:00:00 crc kubenswrapper[4779]: E0929 11:00:00.153291 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" containerName="extract-content" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.153311 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" containerName="extract-content" Sep 29 11:00:00 crc 
kubenswrapper[4779]: E0929 11:00:00.153357 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" containerName="extract-utilities" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.153367 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" containerName="extract-utilities" Sep 29 11:00:00 crc kubenswrapper[4779]: E0929 11:00:00.153388 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" containerName="registry-server" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.153397 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" containerName="registry-server" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.153607 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="e804ce93-8166-4b5c-a18e-728721cbc081" containerName="registry-server" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.154493 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.157360 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.157749 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.180986 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"] Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.283405 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05ac78f1-a505-43df-88f0-1abbbd027d94-secret-volume\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.283548 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlnw6\" (UniqueName: \"kubernetes.io/projected/05ac78f1-a505-43df-88f0-1abbbd027d94-kube-api-access-rlnw6\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.283603 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05ac78f1-a505-43df-88f0-1abbbd027d94-config-volume\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.386543 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05ac78f1-a505-43df-88f0-1abbbd027d94-secret-volume\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" Sep 29 11:00:00 
crc kubenswrapper[4779]: I0929 11:00:00.386695 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlnw6\" (UniqueName: \"kubernetes.io/projected/05ac78f1-a505-43df-88f0-1abbbd027d94-kube-api-access-rlnw6\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"
Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.386744 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05ac78f1-a505-43df-88f0-1abbbd027d94-config-volume\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"
Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.388489 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05ac78f1-a505-43df-88f0-1abbbd027d94-config-volume\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"
Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.393955 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05ac78f1-a505-43df-88f0-1abbbd027d94-secret-volume\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"
Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.404803 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlnw6\" (UniqueName: \"kubernetes.io/projected/05ac78f1-a505-43df-88f0-1abbbd027d94-kube-api-access-rlnw6\") pod \"collect-profiles-29319060-ntdzs\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"
Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.483333 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"
Sep 29 11:00:00 crc kubenswrapper[4779]: I0929 11:00:00.974532 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"]
Sep 29 11:00:01 crc kubenswrapper[4779]: I0929 11:00:01.456436 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" event={"ID":"05ac78f1-a505-43df-88f0-1abbbd027d94","Type":"ContainerStarted","Data":"8fb5e019a860580472966475168c3d7dfc4ab46c4eac2b6bdcd9fb591ee6eab2"}
Sep 29 11:00:01 crc kubenswrapper[4779]: I0929 11:00:01.458061 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" event={"ID":"05ac78f1-a505-43df-88f0-1abbbd027d94","Type":"ContainerStarted","Data":"511c42a21e4114fd780d94859f690b68ef4d0f1334a8c8c684a11496defe213a"}
Sep 29 11:00:02 crc kubenswrapper[4779]: I0929 11:00:02.467533 4779 generic.go:334] "Generic (PLEG): container finished" podID="05ac78f1-a505-43df-88f0-1abbbd027d94" containerID="8fb5e019a860580472966475168c3d7dfc4ab46c4eac2b6bdcd9fb591ee6eab2" exitCode=0
Sep 29 11:00:02 crc kubenswrapper[4779]: I0929 11:00:02.467654 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" event={"ID":"05ac78f1-a505-43df-88f0-1abbbd027d94","Type":"ContainerDied","Data":"8fb5e019a860580472966475168c3d7dfc4ab46c4eac2b6bdcd9fb591ee6eab2"}
Sep 29 11:00:02 crc kubenswrapper[4779]: I0929 11:00:02.883288 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.053145 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05ac78f1-a505-43df-88f0-1abbbd027d94-secret-volume\") pod \"05ac78f1-a505-43df-88f0-1abbbd027d94\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") "
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.054115 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05ac78f1-a505-43df-88f0-1abbbd027d94-config-volume\") pod \"05ac78f1-a505-43df-88f0-1abbbd027d94\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") "
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.054157 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlnw6\" (UniqueName: \"kubernetes.io/projected/05ac78f1-a505-43df-88f0-1abbbd027d94-kube-api-access-rlnw6\") pod \"05ac78f1-a505-43df-88f0-1abbbd027d94\" (UID: \"05ac78f1-a505-43df-88f0-1abbbd027d94\") "
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.056098 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ac78f1-a505-43df-88f0-1abbbd027d94-config-volume" (OuterVolumeSpecName: "config-volume") pod "05ac78f1-a505-43df-88f0-1abbbd027d94" (UID: "05ac78f1-a505-43df-88f0-1abbbd027d94"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.063152 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05ac78f1-a505-43df-88f0-1abbbd027d94-kube-api-access-rlnw6" (OuterVolumeSpecName: "kube-api-access-rlnw6") pod "05ac78f1-a505-43df-88f0-1abbbd027d94" (UID: "05ac78f1-a505-43df-88f0-1abbbd027d94"). InnerVolumeSpecName "kube-api-access-rlnw6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.070117 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05ac78f1-a505-43df-88f0-1abbbd027d94-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "05ac78f1-a505-43df-88f0-1abbbd027d94" (UID: "05ac78f1-a505-43df-88f0-1abbbd027d94"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.157234 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05ac78f1-a505-43df-88f0-1abbbd027d94-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.157278 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlnw6\" (UniqueName: \"kubernetes.io/projected/05ac78f1-a505-43df-88f0-1abbbd027d94-kube-api-access-rlnw6\") on node \"crc\" DevicePath \"\""
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.157290 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/05ac78f1-a505-43df-88f0-1abbbd027d94-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.481551 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs" event={"ID":"05ac78f1-a505-43df-88f0-1abbbd027d94","Type":"ContainerDied","Data":"511c42a21e4114fd780d94859f690b68ef4d0f1334a8c8c684a11496defe213a"}
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.481619 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="511c42a21e4114fd780d94859f690b68ef4d0f1334a8c8c684a11496defe213a"
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.481648 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319060-ntdzs"
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.970987 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv"]
Sep 29 11:00:03 crc kubenswrapper[4779]: I0929 11:00:03.986985 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319015-4ptkv"]
Sep 29 11:00:04 crc kubenswrapper[4779]: I0929 11:00:04.728440 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ca761fb-d727-4f81-8a0e-455b1699bfcc" path="/var/lib/kubelet/pods/2ca761fb-d727-4f81-8a0e-455b1699bfcc/volumes"
Sep 29 11:00:08 crc kubenswrapper[4779]: I0929 11:00:08.724150 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:00:08 crc kubenswrapper[4779]: E0929 11:00:08.728265 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:00:21 crc kubenswrapper[4779]: I0929 11:00:21.715865 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:00:21 crc kubenswrapper[4779]: E0929 11:00:21.717247 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:00:32 crc kubenswrapper[4779]: I0929 11:00:32.714879 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:00:32 crc kubenswrapper[4779]: E0929 11:00:32.715893 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:00:45 crc kubenswrapper[4779]: I0929 11:00:45.715371 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:00:45 crc kubenswrapper[4779]: E0929 11:00:45.716229 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:00:51 crc kubenswrapper[4779]: I0929 11:00:51.421022 4779 scope.go:117] "RemoveContainer" containerID="385d608e28994a585b53fb8931ce09d66a7e1fcae3312b9ffa3a737b624247f3"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.163796 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319061-ldscx"]
Sep 29 11:01:00 crc kubenswrapper[4779]: E0929 11:01:00.165872 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05ac78f1-a505-43df-88f0-1abbbd027d94" containerName="collect-profiles"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.165893 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="05ac78f1-a505-43df-88f0-1abbbd027d94" containerName="collect-profiles"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.166245 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="05ac78f1-a505-43df-88f0-1abbbd027d94" containerName="collect-profiles"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.167283 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.201727 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319061-ldscx"]
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.225307 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-fernet-keys\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.225383 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nwkd\" (UniqueName: \"kubernetes.io/projected/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-kube-api-access-5nwkd\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.225429 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-combined-ca-bundle\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.225695 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-config-data\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.328087 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-config-data\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.328258 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-fernet-keys\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.328317 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nwkd\" (UniqueName: \"kubernetes.io/projected/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-kube-api-access-5nwkd\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.328375 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-combined-ca-bundle\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.717693 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:01:00 crc kubenswrapper[4779]: E0929 11:01:00.718132 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.828803 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-combined-ca-bundle\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.829398 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-config-data\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.832572 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-fernet-keys\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:00 crc kubenswrapper[4779]: I0929 11:01:00.833203 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nwkd\" (UniqueName: \"kubernetes.io/projected/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-kube-api-access-5nwkd\") pod \"keystone-cron-29319061-ldscx\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") " pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:01 crc kubenswrapper[4779]: I0929 11:01:01.098097 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:01 crc kubenswrapper[4779]: I0929 11:01:01.589174 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319061-ldscx"]
Sep 29 11:01:02 crc kubenswrapper[4779]: I0929 11:01:02.108330 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319061-ldscx" event={"ID":"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d","Type":"ContainerStarted","Data":"f933e8e9b1989e1ff29d960f0931a130de10b567ba1629167143dfe818597e07"}
Sep 29 11:01:02 crc kubenswrapper[4779]: I0929 11:01:02.108655 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319061-ldscx" event={"ID":"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d","Type":"ContainerStarted","Data":"bef64066c8cca097713388db50b6f5909e85dab2ec5807be22a461eb03415c37"}
Sep 29 11:01:02 crc kubenswrapper[4779]: I0929 11:01:02.128102 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319061-ldscx" podStartSLOduration=2.128071333 podStartE2EDuration="2.128071333s" podCreationTimestamp="2025-09-29 11:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 11:01:02.127321902 +0000 UTC m=+5494.108645796" watchObservedRunningTime="2025-09-29 11:01:02.128071333 +0000 UTC m=+5494.109395247"
Sep 29 11:01:06 crc kubenswrapper[4779]: I0929 11:01:06.157205 4779 generic.go:334] "Generic (PLEG): container finished" podID="ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d" containerID="f933e8e9b1989e1ff29d960f0931a130de10b567ba1629167143dfe818597e07" exitCode=0
Sep 29 11:01:06 crc kubenswrapper[4779]: I0929 11:01:06.157302 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319061-ldscx" event={"ID":"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d","Type":"ContainerDied","Data":"f933e8e9b1989e1ff29d960f0931a130de10b567ba1629167143dfe818597e07"}
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.597690 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.710137 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-config-data\") pod \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") "
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.710396 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nwkd\" (UniqueName: \"kubernetes.io/projected/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-kube-api-access-5nwkd\") pod \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") "
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.710431 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-combined-ca-bundle\") pod \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") "
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.710581 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-fernet-keys\") pod \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\" (UID: \"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d\") "
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.721213 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d" (UID: "ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.723468 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-kube-api-access-5nwkd" (OuterVolumeSpecName: "kube-api-access-5nwkd") pod "ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d" (UID: "ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d"). InnerVolumeSpecName "kube-api-access-5nwkd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.791152 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-config-data" (OuterVolumeSpecName: "config-data") pod "ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d" (UID: "ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.813940 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d" (UID: "ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.814058 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nwkd\" (UniqueName: \"kubernetes.io/projected/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-kube-api-access-5nwkd\") on node \"crc\" DevicePath \"\""
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.814082 4779 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.814093 4779 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-fernet-keys\") on node \"crc\" DevicePath \"\""
Sep 29 11:01:07 crc kubenswrapper[4779]: I0929 11:01:07.814102 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 11:01:08 crc kubenswrapper[4779]: I0929 11:01:08.193382 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319061-ldscx" event={"ID":"ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d","Type":"ContainerDied","Data":"bef64066c8cca097713388db50b6f5909e85dab2ec5807be22a461eb03415c37"}
Sep 29 11:01:08 crc kubenswrapper[4779]: I0929 11:01:08.193436 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bef64066c8cca097713388db50b6f5909e85dab2ec5807be22a461eb03415c37"
Sep 29 11:01:08 crc kubenswrapper[4779]: I0929 11:01:08.193548 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319061-ldscx"
Sep 29 11:01:14 crc kubenswrapper[4779]: I0929 11:01:14.715026 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:01:14 crc kubenswrapper[4779]: E0929 11:01:14.716343 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:01:29 crc kubenswrapper[4779]: I0929 11:01:29.714934 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:01:29 crc kubenswrapper[4779]: E0929 11:01:29.715895 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:01:43 crc kubenswrapper[4779]: I0929 11:01:43.715278 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:01:43 crc kubenswrapper[4779]: E0929 11:01:43.716700 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:01:54 crc kubenswrapper[4779]: I0929 11:01:54.715578 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:01:54 crc kubenswrapper[4779]: E0929 11:01:54.716753 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:02:07 crc kubenswrapper[4779]: I0929 11:02:07.715611 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:02:07 crc kubenswrapper[4779]: E0929 11:02:07.717102 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:02:21 crc kubenswrapper[4779]: I0929 11:02:21.715374 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:02:21 crc kubenswrapper[4779]: E0929 11:02:21.716416 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:02:34 crc kubenswrapper[4779]: I0929 11:02:34.714808 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:02:34 crc kubenswrapper[4779]: E0929 11:02:34.715886 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19"
Sep 29 11:02:47 crc kubenswrapper[4779]: I0929 11:02:47.714500 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289"
Sep 29 11:02:48 crc kubenswrapper[4779]: I0929 11:02:48.329728 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"bc71e3138a88f410e3c8cab1951172d465b92f287dcb864022e4b8d363d22735"}
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.049982 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-clph5"]
Sep 29 11:03:31 crc kubenswrapper[4779]: E0929 11:03:31.051518 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d" containerName="keystone-cron"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.051535 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d" containerName="keystone-cron"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.052125 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d" containerName="keystone-cron"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.054208 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.066269 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-clph5"]
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.072157 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-catalog-content\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.072278 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbxvx\" (UniqueName: \"kubernetes.io/projected/0995458e-3d8b-414e-9025-4af0d167f634-kube-api-access-gbxvx\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.072370 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-utilities\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.175053 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-catalog-content\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.175175 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbxvx\" (UniqueName: \"kubernetes.io/projected/0995458e-3d8b-414e-9025-4af0d167f634-kube-api-access-gbxvx\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.175254 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-utilities\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.175879 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-catalog-content\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.175960 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-utilities\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.215987 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbxvx\" (UniqueName: \"kubernetes.io/projected/0995458e-3d8b-414e-9025-4af0d167f634-kube-api-access-gbxvx\") pod \"redhat-marketplace-clph5\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") " pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.398976 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:31 crc kubenswrapper[4779]: I0929 11:03:31.917611 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-clph5"]
Sep 29 11:03:32 crc kubenswrapper[4779]: I0929 11:03:32.850816 4779 generic.go:334] "Generic (PLEG): container finished" podID="0995458e-3d8b-414e-9025-4af0d167f634" containerID="68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae" exitCode=0
Sep 29 11:03:32 crc kubenswrapper[4779]: I0929 11:03:32.851260 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-clph5" event={"ID":"0995458e-3d8b-414e-9025-4af0d167f634","Type":"ContainerDied","Data":"68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae"}
Sep 29 11:03:32 crc kubenswrapper[4779]: I0929 11:03:32.851304 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-clph5" event={"ID":"0995458e-3d8b-414e-9025-4af0d167f634","Type":"ContainerStarted","Data":"841636ea1f9723f2d90ba34a06ff51dd6cabe8fa093fca862f64f89edfd81319"}
Sep 29 11:03:34 crc kubenswrapper[4779]: I0929 11:03:34.899927 4779 generic.go:334] "Generic (PLEG): container finished" podID="0995458e-3d8b-414e-9025-4af0d167f634" containerID="dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec" exitCode=0
Sep 29 11:03:34 crc kubenswrapper[4779]: I0929 11:03:34.901014 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-clph5" event={"ID":"0995458e-3d8b-414e-9025-4af0d167f634","Type":"ContainerDied","Data":"dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec"}
Sep 29 11:03:35 crc kubenswrapper[4779]: I0929 11:03:35.914434 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-clph5" event={"ID":"0995458e-3d8b-414e-9025-4af0d167f634","Type":"ContainerStarted","Data":"7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c"}
Sep 29 11:03:35 crc kubenswrapper[4779]: I0929 11:03:35.956422 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-clph5" podStartSLOduration=2.5318151650000003 podStartE2EDuration="4.956395285s" podCreationTimestamp="2025-09-29 11:03:31 +0000 UTC" firstStartedPulling="2025-09-29 11:03:32.853965017 +0000 UTC m=+5644.835288941" lastFinishedPulling="2025-09-29 11:03:35.278545157 +0000 UTC m=+5647.259869061" observedRunningTime="2025-09-29 11:03:35.950545455 +0000 UTC m=+5647.931869369" watchObservedRunningTime="2025-09-29 11:03:35.956395285 +0000 UTC m=+5647.937719189"
Sep 29 11:03:41 crc kubenswrapper[4779]: I0929 11:03:41.399397 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:41 crc kubenswrapper[4779]: I0929 11:03:41.400313 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:41 crc kubenswrapper[4779]: I0929 11:03:41.457885 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:42 crc kubenswrapper[4779]: I0929 11:03:42.033634 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:42 crc kubenswrapper[4779]: I0929 11:03:42.088066 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-clph5"]
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.001663 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-clph5" podUID="0995458e-3d8b-414e-9025-4af0d167f634" containerName="registry-server" containerID="cri-o://7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c" gracePeriod=2
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.498135 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.622534 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbxvx\" (UniqueName: \"kubernetes.io/projected/0995458e-3d8b-414e-9025-4af0d167f634-kube-api-access-gbxvx\") pod \"0995458e-3d8b-414e-9025-4af0d167f634\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") "
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.622670 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-catalog-content\") pod \"0995458e-3d8b-414e-9025-4af0d167f634\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") "
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.622752 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-utilities\") pod \"0995458e-3d8b-414e-9025-4af0d167f634\" (UID: \"0995458e-3d8b-414e-9025-4af0d167f634\") "
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.624191 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-utilities" (OuterVolumeSpecName: "utilities") pod "0995458e-3d8b-414e-9025-4af0d167f634" (UID: "0995458e-3d8b-414e-9025-4af0d167f634"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.637206 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0995458e-3d8b-414e-9025-4af0d167f634-kube-api-access-gbxvx" (OuterVolumeSpecName: "kube-api-access-gbxvx") pod "0995458e-3d8b-414e-9025-4af0d167f634" (UID: "0995458e-3d8b-414e-9025-4af0d167f634"). InnerVolumeSpecName "kube-api-access-gbxvx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.639667 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0995458e-3d8b-414e-9025-4af0d167f634" (UID: "0995458e-3d8b-414e-9025-4af0d167f634"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.725160 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbxvx\" (UniqueName: \"kubernetes.io/projected/0995458e-3d8b-414e-9025-4af0d167f634-kube-api-access-gbxvx\") on node \"crc\" DevicePath \"\""
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.725199 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 11:03:44 crc kubenswrapper[4779]: I0929 11:03:44.725213 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0995458e-3d8b-414e-9025-4af0d167f634-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.018823 4779 generic.go:334] "Generic (PLEG): container finished" podID="0995458e-3d8b-414e-9025-4af0d167f634" containerID="7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c" exitCode=0
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.018936 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-clph5" event={"ID":"0995458e-3d8b-414e-9025-4af0d167f634","Type":"ContainerDied","Data":"7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c"}
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.019430 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-clph5" event={"ID":"0995458e-3d8b-414e-9025-4af0d167f634","Type":"ContainerDied","Data":"841636ea1f9723f2d90ba34a06ff51dd6cabe8fa093fca862f64f89edfd81319"}
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.019460 4779 scope.go:117] "RemoveContainer" containerID="7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.018969 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-clph5"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.047094 4779 scope.go:117] "RemoveContainer" containerID="dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.075129 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-clph5"]
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.081000 4779 scope.go:117] "RemoveContainer" containerID="68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.092796 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-clph5"]
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.136307 4779 scope.go:117] "RemoveContainer" containerID="7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c"
Sep 29 11:03:45 crc kubenswrapper[4779]: E0929 11:03:45.136871 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c\": container with ID starting with 7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c not found: ID does not exist" containerID="7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.136922 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c"} err="failed to get container status \"7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c\": rpc error: code = NotFound desc = could not find container \"7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c\": container with ID starting with 7b29d2363cc796dcb15a5f710984a11d32ed9bfea33700e69f76abbddbd2c19c not found: ID does not exist"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.136954 4779 scope.go:117] "RemoveContainer" containerID="dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec"
Sep 29 11:03:45 crc kubenswrapper[4779]: E0929 11:03:45.137435 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec\": container with ID starting with dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec not found: ID does not exist" containerID="dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.137460 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec"} err="failed to get container status \"dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec\": rpc error: code = NotFound desc = could not find container \"dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec\": container with ID starting with dc393906eca25640fc64febe4f6688ee98c6f857a8f681597160b92591d05dec not found: ID does not exist"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.137476 4779 scope.go:117] "RemoveContainer" containerID="68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae"
Sep 29 11:03:45 crc kubenswrapper[4779]: E0929 11:03:45.137740 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae\": container with ID starting with 68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae not found: ID does not exist" containerID="68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae"
Sep 29 11:03:45 crc kubenswrapper[4779]: I0929 11:03:45.137764 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae"} err="failed to get container status \"68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae\": rpc error: code = NotFound desc = could not find container \"68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae\": container with ID starting with 68ad1b9526f0b4c8034f90c5143f03fc27f9fb7039b219c5109a75ecad5fadae not found: ID does not exist"
Sep 29 11:03:46 crc kubenswrapper[4779]: I0929 11:03:46.728552 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0995458e-3d8b-414e-9025-4af0d167f634" path="/var/lib/kubelet/pods/0995458e-3d8b-414e-9025-4af0d167f634/volumes"
Sep 29 11:04:14 crc kubenswrapper[4779]: I0929 11:04:14.906669 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zc2kj"]
Sep 29 11:04:14 crc kubenswrapper[4779]: E0929 11:04:14.908273 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0995458e-3d8b-414e-9025-4af0d167f634" containerName="extract-content"
Sep 29 11:04:14 crc kubenswrapper[4779]: I0929 11:04:14.908303 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0995458e-3d8b-414e-9025-4af0d167f634" containerName="extract-content"
Sep 29 11:04:14 crc kubenswrapper[4779]: E0929 11:04:14.908360 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0995458e-3d8b-414e-9025-4af0d167f634" containerName="extract-utilities"
Sep 29 11:04:14 crc kubenswrapper[4779]: I0929 11:04:14.908371 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0995458e-3d8b-414e-9025-4af0d167f634" containerName="extract-utilities"
Sep 29 11:04:14 crc kubenswrapper[4779]: E0929 11:04:14.908396 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0995458e-3d8b-414e-9025-4af0d167f634" containerName="registry-server"
Sep 29 11:04:14 crc kubenswrapper[4779]: I0929 11:04:14.908406 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="0995458e-3d8b-414e-9025-4af0d167f634" containerName="registry-server"
Sep 29 11:04:14 crc kubenswrapper[4779]: I0929 11:04:14.908683 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="0995458e-3d8b-414e-9025-4af0d167f634" containerName="registry-server"
Sep 29 11:04:14 crc kubenswrapper[4779]: I0929 11:04:14.912016 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:14 crc kubenswrapper[4779]: I0929 11:04:14.926447 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zc2kj"]
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.078460 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0c0ff81-b037-480d-8ab4-dfe1d6703938-utilities\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.079211 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7c4p\" (UniqueName: \"kubernetes.io/projected/d0c0ff81-b037-480d-8ab4-dfe1d6703938-kube-api-access-g7c4p\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.079478 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0c0ff81-b037-480d-8ab4-dfe1d6703938-catalog-content\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.182720 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7c4p\" (UniqueName: \"kubernetes.io/projected/d0c0ff81-b037-480d-8ab4-dfe1d6703938-kube-api-access-g7c4p\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.182947 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0c0ff81-b037-480d-8ab4-dfe1d6703938-catalog-content\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.183666 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0c0ff81-b037-480d-8ab4-dfe1d6703938-catalog-content\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.183746 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0c0ff81-b037-480d-8ab4-dfe1d6703938-utilities\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.184086 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0c0ff81-b037-480d-8ab4-dfe1d6703938-utilities\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.224773 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7c4p\" (UniqueName: \"kubernetes.io/projected/d0c0ff81-b037-480d-8ab4-dfe1d6703938-kube-api-access-g7c4p\") pod \"redhat-operators-zc2kj\" (UID: \"d0c0ff81-b037-480d-8ab4-dfe1d6703938\") " pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.251085 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:15 crc kubenswrapper[4779]: I0929 11:04:15.811937 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zc2kj"]
Sep 29 11:04:16 crc kubenswrapper[4779]: I0929 11:04:16.403855 4779 generic.go:334] "Generic (PLEG): container finished" podID="d0c0ff81-b037-480d-8ab4-dfe1d6703938" containerID="62245173fc2e7947b376358759349fc52cc5e33fe9015881fbc4bd7fb446a427" exitCode=0
Sep 29 11:04:16 crc kubenswrapper[4779]: I0929 11:04:16.403999 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc2kj" event={"ID":"d0c0ff81-b037-480d-8ab4-dfe1d6703938","Type":"ContainerDied","Data":"62245173fc2e7947b376358759349fc52cc5e33fe9015881fbc4bd7fb446a427"}
Sep 29 11:04:16 crc kubenswrapper[4779]: I0929 11:04:16.404317 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc2kj" event={"ID":"d0c0ff81-b037-480d-8ab4-dfe1d6703938","Type":"ContainerStarted","Data":"64f846675b41391934d2e4c68a9945a5b7dc987a924673984fab193aa84b61d2"}
Sep 29 11:04:16 crc kubenswrapper[4779]: I0929 11:04:16.406662 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 11:04:27 crc kubenswrapper[4779]: I0929 11:04:27.568618 4779 generic.go:334] "Generic (PLEG): container finished" podID="d0c0ff81-b037-480d-8ab4-dfe1d6703938" containerID="8d013e157bc8167985e9695b237dff0ea9f4a0a822e2616113116c392863ba1e" exitCode=0
Sep 29 11:04:27 crc kubenswrapper[4779]: I0929 11:04:27.568801 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc2kj" event={"ID":"d0c0ff81-b037-480d-8ab4-dfe1d6703938","Type":"ContainerDied","Data":"8d013e157bc8167985e9695b237dff0ea9f4a0a822e2616113116c392863ba1e"}
Sep 29 11:04:28 crc kubenswrapper[4779]: I0929 11:04:28.584061 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc2kj" event={"ID":"d0c0ff81-b037-480d-8ab4-dfe1d6703938","Type":"ContainerStarted","Data":"e5316977f3f296072a5bcab7c4806b26a88b293fb719ada7a210f27b0ad1f406"}
Sep 29 11:04:28 crc kubenswrapper[4779]: I0929 11:04:28.610854 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zc2kj" podStartSLOduration=3.010185535 podStartE2EDuration="14.61082602s" podCreationTimestamp="2025-09-29 11:04:14 +0000 UTC" firstStartedPulling="2025-09-29 11:04:16.406281147 +0000 UTC m=+5688.387605051" lastFinishedPulling="2025-09-29 11:04:28.006921632 +0000 UTC m=+5699.988245536" observedRunningTime="2025-09-29 11:04:28.600957823 +0000 UTC m=+5700.582281737" watchObservedRunningTime="2025-09-29 11:04:28.61082602 +0000 UTC m=+5700.592149924"
Sep 29 11:04:35 crc kubenswrapper[4779]: I0929 11:04:35.255355 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:35 crc kubenswrapper[4779]: I0929 11:04:35.256002 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:35 crc kubenswrapper[4779]: I0929 11:04:35.316857 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:35 crc kubenswrapper[4779]: I0929 11:04:35.711473 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zc2kj"
Sep 29 11:04:35 crc kubenswrapper[4779]: I0929 11:04:35.804460 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zc2kj"]
Sep 29 11:04:35 crc kubenswrapper[4779]: I0929 11:04:35.857030 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zvkb9"]
Sep 29 11:04:35 crc kubenswrapper[4779]: I0929 11:04:35.857378 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zvkb9" podUID="9cab2663-d571-4988-b005-338943d811f5" containerName="registry-server" containerID="cri-o://ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131" gracePeriod=2
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.446786 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zvkb9"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.564339 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tl7kj\" (UniqueName: \"kubernetes.io/projected/9cab2663-d571-4988-b005-338943d811f5-kube-api-access-tl7kj\") pod \"9cab2663-d571-4988-b005-338943d811f5\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") "
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.564577 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-catalog-content\") pod \"9cab2663-d571-4988-b005-338943d811f5\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") "
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.564683 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-utilities\") pod \"9cab2663-d571-4988-b005-338943d811f5\" (UID: \"9cab2663-d571-4988-b005-338943d811f5\") "
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.566009 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-utilities" (OuterVolumeSpecName: "utilities") pod "9cab2663-d571-4988-b005-338943d811f5" (UID: "9cab2663-d571-4988-b005-338943d811f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.588948 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cab2663-d571-4988-b005-338943d811f5-kube-api-access-tl7kj" (OuterVolumeSpecName: "kube-api-access-tl7kj") pod "9cab2663-d571-4988-b005-338943d811f5" (UID: "9cab2663-d571-4988-b005-338943d811f5"). InnerVolumeSpecName "kube-api-access-tl7kj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.662876 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cab2663-d571-4988-b005-338943d811f5" (UID: "9cab2663-d571-4988-b005-338943d811f5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.667462 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.667728 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cab2663-d571-4988-b005-338943d811f5-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.667853 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tl7kj\" (UniqueName: \"kubernetes.io/projected/9cab2663-d571-4988-b005-338943d811f5-kube-api-access-tl7kj\") on node \"crc\" DevicePath \"\""
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.671823 4779 generic.go:334] "Generic (PLEG): container finished" podID="9cab2663-d571-4988-b005-338943d811f5" containerID="ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131" exitCode=0
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.672049 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zvkb9"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.672072 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zvkb9" event={"ID":"9cab2663-d571-4988-b005-338943d811f5","Type":"ContainerDied","Data":"ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131"}
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.672307 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zvkb9" event={"ID":"9cab2663-d571-4988-b005-338943d811f5","Type":"ContainerDied","Data":"67329ad6e22fd767269d76229e7d926ac53d23969377811b3147768af3b77d6e"}
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.672346 4779 scope.go:117] "RemoveContainer" containerID="ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.707979 4779 scope.go:117] "RemoveContainer" containerID="287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.731514 4779 scope.go:117] "RemoveContainer" containerID="21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.737778 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zvkb9"]
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.737817 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zvkb9"]
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.791658 4779 scope.go:117] "RemoveContainer" containerID="ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131"
Sep 29 11:04:36 crc kubenswrapper[4779]: E0929 11:04:36.792324 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131\": container with ID starting with ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131 not found: ID does not exist" containerID="ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.792384 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131"} err="failed to get container status \"ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131\": rpc error: code = NotFound desc = could not find container \"ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131\": container with ID starting with ba7344b82f80ec6209d9d66911e67eba0719795f36ef788cc51a73855cc35131 not found: ID does not exist"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.792421 4779 scope.go:117] "RemoveContainer" containerID="287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a"
Sep 29 11:04:36 crc kubenswrapper[4779]: E0929 11:04:36.792951 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a\": container with ID starting with 287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a not found: ID does not exist" containerID="287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.792994 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a"} err="failed to get container status \"287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a\": rpc error: code = NotFound desc = could not find container \"287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a\": container with ID starting with 287f2afb10e05c92273efcd69105b2f0e4d3dede642e2b48551b8fa82a9ab27a not found: ID does not exist"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.793022 4779 scope.go:117] "RemoveContainer" containerID="21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77"
Sep 29 11:04:36 crc kubenswrapper[4779]: E0929 11:04:36.793519 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77\": container with ID starting with 21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77 not found: ID does not exist" containerID="21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77"
Sep 29 11:04:36 crc kubenswrapper[4779]: I0929 11:04:36.793549 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77"} err="failed to get container status \"21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77\": rpc error: code = NotFound desc = could not find container \"21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77\": container with ID starting with 21a4e5da670e87a5d751a02232ff3891669446d2531fa4f0f247da008a131c77 not found: ID does not exist"
Sep 29 11:04:38 crc kubenswrapper[4779]: I0929 11:04:38.724987 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cab2663-d571-4988-b005-338943d811f5" path="/var/lib/kubelet/pods/9cab2663-d571-4988-b005-338943d811f5/volumes"
Sep 29 11:05:16 crc kubenswrapper[4779]: I0929 11:05:16.966783 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 11:05:16 crc kubenswrapper[4779]: I0929 11:05:16.967618 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 11:05:46 crc kubenswrapper[4779]: I0929 11:05:46.966679 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 11:05:46 crc kubenswrapper[4779]: I0929 11:05:46.968118 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 11:06:16 crc kubenswrapper[4779]: I0929 11:06:16.967113 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 11:06:16 crc kubenswrapper[4779]: I0929 11:06:16.967886 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 11:06:16 crc kubenswrapper[4779]: I0929 11:06:16.967968 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv"
Sep 29 11:06:16 crc kubenswrapper[4779]: I0929 11:06:16.969037 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bc71e3138a88f410e3c8cab1951172d465b92f287dcb864022e4b8d363d22735"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 11:06:16 crc kubenswrapper[4779]: I0929 11:06:16.969099 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://bc71e3138a88f410e3c8cab1951172d465b92f287dcb864022e4b8d363d22735" gracePeriod=600
Sep 29 11:06:17 crc kubenswrapper[4779]: I0929 11:06:17.820374 4779 generic.go:334] "Generic (PLEG): container finished"
podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="bc71e3138a88f410e3c8cab1951172d465b92f287dcb864022e4b8d363d22735" exitCode=0 Sep 29 11:06:17 crc kubenswrapper[4779]: I0929 11:06:17.820453 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"bc71e3138a88f410e3c8cab1951172d465b92f287dcb864022e4b8d363d22735"} Sep 29 11:06:17 crc kubenswrapper[4779]: I0929 11:06:17.821259 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c"} Sep 29 11:06:17 crc kubenswrapper[4779]: I0929 11:06:17.821287 4779 scope.go:117] "RemoveContainer" containerID="15b85b19dfd233a3ba37a665f3980c005f63a1bb436012dc1ed88412b757a289" Sep 29 11:08:46 crc kubenswrapper[4779]: I0929 11:08:46.967081 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 11:08:46 crc kubenswrapper[4779]: I0929 11:08:46.969726 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:09:16 crc kubenswrapper[4779]: I0929 11:09:16.966058 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 11:09:16 crc kubenswrapper[4779]: I0929 11:09:16.966599 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:09:46 crc kubenswrapper[4779]: I0929 11:09:46.966580 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 11:09:46 crc kubenswrapper[4779]: I0929 11:09:46.967199 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:09:46 crc kubenswrapper[4779]: I0929 11:09:46.967260 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 11:09:46 crc kubenswrapper[4779]: I0929 11:09:46.968165 4779 kuberuntime_manager.go:1027] "Message for 
Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 11:09:46 crc kubenswrapper[4779]: I0929 11:09:46.968225 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" gracePeriod=600 Sep 29 11:09:47 crc kubenswrapper[4779]: E0929 11:09:47.103335 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:09:47 crc kubenswrapper[4779]: I0929 11:09:47.190099 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" exitCode=0 Sep 29 11:09:47 crc kubenswrapper[4779]: I0929 11:09:47.190168 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c"} Sep 29 11:09:47 crc kubenswrapper[4779]: I0929 11:09:47.190245 4779 scope.go:117] "RemoveContainer" containerID="bc71e3138a88f410e3c8cab1951172d465b92f287dcb864022e4b8d363d22735" Sep 29 11:09:47 crc kubenswrapper[4779]: I0929 11:09:47.191010 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:09:47 crc kubenswrapper[4779]: E0929 11:09:47.191493 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:10:02 crc kubenswrapper[4779]: I0929 11:10:02.714275 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:10:02 crc kubenswrapper[4779]: E0929 11:10:02.715147 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:10:13 crc kubenswrapper[4779]: I0929 11:10:13.714967 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:10:13 crc kubenswrapper[4779]: E0929 
11:10:13.715803 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:10:26 crc kubenswrapper[4779]: I0929 11:10:26.715900 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:10:26 crc kubenswrapper[4779]: E0929 11:10:26.716983 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:10:38 crc kubenswrapper[4779]: I0929 11:10:38.730655 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:10:38 crc kubenswrapper[4779]: E0929 11:10:38.731842 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.105696 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xk5kg"] Sep 29 11:10:46 crc kubenswrapper[4779]: E0929 11:10:46.108161 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cab2663-d571-4988-b005-338943d811f5" containerName="registry-server" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.108287 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cab2663-d571-4988-b005-338943d811f5" containerName="registry-server" Sep 29 11:10:46 crc kubenswrapper[4779]: E0929 11:10:46.108410 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cab2663-d571-4988-b005-338943d811f5" containerName="extract-content" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.108496 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cab2663-d571-4988-b005-338943d811f5" containerName="extract-content" Sep 29 11:10:46 crc kubenswrapper[4779]: E0929 11:10:46.108594 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cab2663-d571-4988-b005-338943d811f5" containerName="extract-utilities" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.108746 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cab2663-d571-4988-b005-338943d811f5" containerName="extract-utilities" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.109133 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cab2663-d571-4988-b005-338943d811f5" containerName="registry-server" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.111255 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.125865 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xk5kg"] Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.218844 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtkm5\" (UniqueName: \"kubernetes.io/projected/ea6d3ce2-60e2-47d9-875a-047d0535ef41-kube-api-access-vtkm5\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.219287 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-utilities\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.219538 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-catalog-content\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.321475 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtkm5\" (UniqueName: \"kubernetes.io/projected/ea6d3ce2-60e2-47d9-875a-047d0535ef41-kube-api-access-vtkm5\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.321594 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-utilities\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.321658 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-catalog-content\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.322156 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-catalog-content\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.322323 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-utilities\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.351310 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vtkm5\" (UniqueName: \"kubernetes.io/projected/ea6d3ce2-60e2-47d9-875a-047d0535ef41-kube-api-access-vtkm5\") pod \"community-operators-xk5kg\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:46 crc kubenswrapper[4779]: I0929 11:10:46.453151 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:47 crc kubenswrapper[4779]: I0929 11:10:47.027963 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xk5kg"] Sep 29 11:10:47 crc kubenswrapper[4779]: I0929 11:10:47.798970 4779 generic.go:334] "Generic (PLEG): container finished" podID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerID="ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175" exitCode=0 Sep 29 11:10:47 crc kubenswrapper[4779]: I0929 11:10:47.799062 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk5kg" event={"ID":"ea6d3ce2-60e2-47d9-875a-047d0535ef41","Type":"ContainerDied","Data":"ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175"} Sep 29 11:10:47 crc kubenswrapper[4779]: I0929 11:10:47.800198 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk5kg" event={"ID":"ea6d3ce2-60e2-47d9-875a-047d0535ef41","Type":"ContainerStarted","Data":"0d7d8d4228da767d087eb594bb9e27f25e795ded1dd6204404e8aa37165c9b0f"} Sep 29 11:10:47 crc kubenswrapper[4779]: I0929 11:10:47.802204 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 11:10:48 crc kubenswrapper[4779]: I0929 11:10:48.813463 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk5kg" event={"ID":"ea6d3ce2-60e2-47d9-875a-047d0535ef41","Type":"ContainerStarted","Data":"a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4"} Sep 29 11:10:49 crc kubenswrapper[4779]: I0929 11:10:49.714155 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:10:49 crc kubenswrapper[4779]: E0929 11:10:49.714769 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:10:49 crc kubenswrapper[4779]: I0929 11:10:49.827915 4779 generic.go:334] "Generic (PLEG): container finished" podID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerID="a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4" exitCode=0 Sep 29 11:10:49 crc kubenswrapper[4779]: I0929 11:10:49.827978 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk5kg" event={"ID":"ea6d3ce2-60e2-47d9-875a-047d0535ef41","Type":"ContainerDied","Data":"a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4"} Sep 29 11:10:50 crc kubenswrapper[4779]: I0929 11:10:50.852178 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk5kg" 
event={"ID":"ea6d3ce2-60e2-47d9-875a-047d0535ef41","Type":"ContainerStarted","Data":"0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6"} Sep 29 11:10:50 crc kubenswrapper[4779]: I0929 11:10:50.883085 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xk5kg" podStartSLOduration=2.286861111 podStartE2EDuration="4.88305686s" podCreationTimestamp="2025-09-29 11:10:46 +0000 UTC" firstStartedPulling="2025-09-29 11:10:47.801870631 +0000 UTC m=+6079.783194545" lastFinishedPulling="2025-09-29 11:10:50.39806639 +0000 UTC m=+6082.379390294" observedRunningTime="2025-09-29 11:10:50.879557368 +0000 UTC m=+6082.860881302" watchObservedRunningTime="2025-09-29 11:10:50.88305686 +0000 UTC m=+6082.864380764" Sep 29 11:10:56 crc kubenswrapper[4779]: I0929 11:10:56.453555 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:56 crc kubenswrapper[4779]: I0929 11:10:56.454217 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:56 crc kubenswrapper[4779]: I0929 11:10:56.508464 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:56 crc kubenswrapper[4779]: I0929 11:10:56.986983 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:57 crc kubenswrapper[4779]: I0929 11:10:57.038514 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xk5kg"] Sep 29 11:10:58 crc kubenswrapper[4779]: I0929 11:10:58.957751 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xk5kg" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerName="registry-server" containerID="cri-o://0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6" gracePeriod=2 Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.459226 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.629444 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-utilities\") pod \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.629872 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtkm5\" (UniqueName: \"kubernetes.io/projected/ea6d3ce2-60e2-47d9-875a-047d0535ef41-kube-api-access-vtkm5\") pod \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.629961 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-catalog-content\") pod \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\" (UID: \"ea6d3ce2-60e2-47d9-875a-047d0535ef41\") " Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.631123 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-utilities" (OuterVolumeSpecName: "utilities") pod "ea6d3ce2-60e2-47d9-875a-047d0535ef41" (UID: "ea6d3ce2-60e2-47d9-875a-047d0535ef41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.636611 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea6d3ce2-60e2-47d9-875a-047d0535ef41-kube-api-access-vtkm5" (OuterVolumeSpecName: "kube-api-access-vtkm5") pod "ea6d3ce2-60e2-47d9-875a-047d0535ef41" (UID: "ea6d3ce2-60e2-47d9-875a-047d0535ef41"). InnerVolumeSpecName "kube-api-access-vtkm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.687991 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea6d3ce2-60e2-47d9-875a-047d0535ef41" (UID: "ea6d3ce2-60e2-47d9-875a-047d0535ef41"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.733346 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtkm5\" (UniqueName: \"kubernetes.io/projected/ea6d3ce2-60e2-47d9-875a-047d0535ef41-kube-api-access-vtkm5\") on node \"crc\" DevicePath \"\"" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.733605 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.733616 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea6d3ce2-60e2-47d9-875a-047d0535ef41-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.973626 4779 generic.go:334] "Generic (PLEG): container finished" podID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerID="0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6" exitCode=0 Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.973694 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk5kg" event={"ID":"ea6d3ce2-60e2-47d9-875a-047d0535ef41","Type":"ContainerDied","Data":"0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6"} Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.973741 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk5kg" event={"ID":"ea6d3ce2-60e2-47d9-875a-047d0535ef41","Type":"ContainerDied","Data":"0d7d8d4228da767d087eb594bb9e27f25e795ded1dd6204404e8aa37165c9b0f"} Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.973769 4779 scope.go:117] "RemoveContainer" containerID="0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.974045 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xk5kg" Sep 29 11:10:59 crc kubenswrapper[4779]: I0929 11:10:59.999554 4779 scope.go:117] "RemoveContainer" containerID="a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4" Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.015633 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xk5kg"] Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.029979 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xk5kg"] Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.034826 4779 scope.go:117] "RemoveContainer" containerID="ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175" Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.077286 4779 scope.go:117] "RemoveContainer" containerID="0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6" Sep 29 11:11:00 crc kubenswrapper[4779]: E0929 11:11:00.077901 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6\": container with ID starting with 0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6 not found: ID does not exist" containerID="0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6" Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.077971 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6"} err="failed to get container status \"0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6\": rpc error: code = NotFound desc = could not find container \"0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6\": container with ID starting with 0cd53144c40616690877e501bee3df4612e3177bd0c9fad2c63a283fefbab3a6 not found: ID does not exist" Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.078014 4779 scope.go:117] "RemoveContainer" containerID="a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4" Sep 29 11:11:00 crc kubenswrapper[4779]: E0929 11:11:00.078580 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4\": container with ID starting with a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4 not found: ID does not exist" containerID="a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4" Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.078650 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4"} err="failed to get container status \"a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4\": rpc error: code = NotFound desc = could not find container \"a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4\": container with ID starting with a711b93175923e8bb527ffa9db6b7f63b5903f9affe7146878fe17ee3426e6a4 not found: ID does not exist" Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.078703 4779 scope.go:117] "RemoveContainer" containerID="ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175" Sep 29 11:11:00 crc kubenswrapper[4779]: E0929 11:11:00.079057 4779 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175\": container with ID starting with ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175 not found: ID does not exist" containerID="ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175" Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.079168 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175"} err="failed to get container status \"ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175\": rpc error: code = NotFound desc = could not find container \"ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175\": container with ID starting with ffde9a20612dff3340825e85499611a0953fae3da1da7fcb49929c1c837b1175 not found: ID does not exist" Sep 29 11:11:00 crc kubenswrapper[4779]: I0929 11:11:00.728271 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" path="/var/lib/kubelet/pods/ea6d3ce2-60e2-47d9-875a-047d0535ef41/volumes" Sep 29 11:11:04 crc kubenswrapper[4779]: I0929 11:11:04.715301 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:11:04 crc kubenswrapper[4779]: E0929 11:11:04.716012 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:11:15 crc kubenswrapper[4779]: I0929 11:11:15.714735 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:11:15 crc kubenswrapper[4779]: E0929 11:11:15.715619 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:11:27 crc kubenswrapper[4779]: I0929 11:11:27.714143 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:11:27 crc kubenswrapper[4779]: E0929 11:11:27.715206 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:11:40 crc kubenswrapper[4779]: I0929 11:11:40.715134 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:11:40 crc kubenswrapper[4779]: E0929 11:11:40.715964 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:11:53 crc kubenswrapper[4779]: I0929 11:11:53.714990 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:11:53 crc kubenswrapper[4779]: E0929 11:11:53.715813 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:12:04 crc kubenswrapper[4779]: I0929 11:12:04.715164 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:12:04 crc kubenswrapper[4779]: E0929 11:12:04.716061 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:12:15 crc kubenswrapper[4779]: I0929 11:12:15.716106 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:12:15 crc kubenswrapper[4779]: E0929 11:12:15.717084 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:12:30 crc kubenswrapper[4779]: I0929 11:12:30.714299 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:12:30 crc kubenswrapper[4779]: E0929 11:12:30.715110 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:12:44 crc kubenswrapper[4779]: I0929 11:12:44.715575 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:12:44 crc kubenswrapper[4779]: E0929 11:12:44.716864 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:12:55 crc kubenswrapper[4779]: I0929 11:12:55.714036 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:12:55 crc kubenswrapper[4779]: E0929 11:12:55.714780 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:13:06 crc kubenswrapper[4779]: I0929 11:13:06.714088 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:13:06 crc kubenswrapper[4779]: E0929 11:13:06.715053 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:13:21 crc kubenswrapper[4779]: I0929 11:13:21.714870 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:13:21 crc kubenswrapper[4779]: E0929 11:13:21.715922 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:13:35 crc kubenswrapper[4779]: I0929 11:13:35.715066 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:13:35 crc kubenswrapper[4779]: E0929 11:13:35.717267 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:13:46 crc kubenswrapper[4779]: I0929 11:13:46.714529 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:13:46 crc kubenswrapper[4779]: E0929 11:13:46.715452 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" 
podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:14:00 crc kubenswrapper[4779]: I0929 11:14:00.715850 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:14:00 crc kubenswrapper[4779]: E0929 11:14:00.717012 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:14:11 crc kubenswrapper[4779]: I0929 11:14:11.715042 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:14:11 crc kubenswrapper[4779]: E0929 11:14:11.716005 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:14:24 crc kubenswrapper[4779]: I0929 11:14:24.715740 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:14:24 crc kubenswrapper[4779]: E0929 11:14:24.716433 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:14:37 crc kubenswrapper[4779]: I0929 11:14:37.715300 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:14:37 crc kubenswrapper[4779]: E0929 11:14:37.716526 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:14:51 crc kubenswrapper[4779]: I0929 11:14:51.714754 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:14:52 crc kubenswrapper[4779]: I0929 11:14:52.420704 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"1a03ce0ceb551acf7dbaa770a33c6c1fb633aca21ecf8c2b971b784f7bf5f2c9"} Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.177394 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg"] Sep 29 11:15:00 crc kubenswrapper[4779]: E0929 11:15:00.178548 4779 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerName="registry-server" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.178565 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerName="registry-server" Sep 29 11:15:00 crc kubenswrapper[4779]: E0929 11:15:00.178589 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerName="extract-content" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.178605 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerName="extract-content" Sep 29 11:15:00 crc kubenswrapper[4779]: E0929 11:15:00.178634 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerName="extract-utilities" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.178642 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerName="extract-utilities" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.178878 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea6d3ce2-60e2-47d9-875a-047d0535ef41" containerName="registry-server" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.179941 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.182513 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.183320 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.190884 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg"] Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.321839 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jw25\" (UniqueName: \"kubernetes.io/projected/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-kube-api-access-5jw25\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.321959 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-config-volume\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.322422 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-secret-volume\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.424613 4779 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-secret-volume\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.424857 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jw25\" (UniqueName: \"kubernetes.io/projected/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-kube-api-access-5jw25\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.425105 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-config-volume\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.426466 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-config-volume\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.433686 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-secret-volume\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.453712 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jw25\" (UniqueName: \"kubernetes.io/projected/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-kube-api-access-5jw25\") pod \"collect-profiles-29319075-fn2fg\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.524045 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.658390 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v6szl"] Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.662660 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.677065 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v6szl"] Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.834654 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-catalog-content\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.834892 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-utilities\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.835019 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q9bb\" (UniqueName: \"kubernetes.io/projected/7767579a-9feb-4bdb-9438-6f4d251eaa97-kube-api-access-8q9bb\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.937312 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-catalog-content\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.937554 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-utilities\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.937672 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q9bb\" (UniqueName: \"kubernetes.io/projected/7767579a-9feb-4bdb-9438-6f4d251eaa97-kube-api-access-8q9bb\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.938894 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-utilities\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.938967 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-catalog-content\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.964968 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8q9bb\" (UniqueName: \"kubernetes.io/projected/7767579a-9feb-4bdb-9438-6f4d251eaa97-kube-api-access-8q9bb\") pod \"redhat-operators-v6szl\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:00 crc kubenswrapper[4779]: I0929 11:15:00.998892 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:01 crc kubenswrapper[4779]: I0929 11:15:01.121156 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg"] Sep 29 11:15:01 crc kubenswrapper[4779]: I0929 11:15:01.511809 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" event={"ID":"8c3c4c64-343e-4f3d-8196-68cb1b49dee7","Type":"ContainerStarted","Data":"9da20658a23afd4c27ceb6e5845aadb62442822d45b1689b20cb883ca11edb8b"} Sep 29 11:15:01 crc kubenswrapper[4779]: I0929 11:15:01.512342 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" event={"ID":"8c3c4c64-343e-4f3d-8196-68cb1b49dee7","Type":"ContainerStarted","Data":"e6939ff4711db2357e315872202168a524a5963d912536574763b52d68b81bca"} Sep 29 11:15:01 crc kubenswrapper[4779]: I0929 11:15:01.566433 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" podStartSLOduration=1.56640939 podStartE2EDuration="1.56640939s" podCreationTimestamp="2025-09-29 11:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 11:15:01.540481976 +0000 UTC m=+6333.521805890" watchObservedRunningTime="2025-09-29 11:15:01.56640939 +0000 UTC m=+6333.547733304" Sep 29 11:15:01 crc kubenswrapper[4779]: I0929 11:15:01.568686 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v6szl"] Sep 29 11:15:02 crc kubenswrapper[4779]: I0929 11:15:02.537762 4779 generic.go:334] "Generic (PLEG): container finished" podID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerID="16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08" exitCode=0 Sep 29 11:15:02 crc kubenswrapper[4779]: I0929 11:15:02.538281 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6szl" event={"ID":"7767579a-9feb-4bdb-9438-6f4d251eaa97","Type":"ContainerDied","Data":"16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08"} Sep 29 11:15:02 crc kubenswrapper[4779]: I0929 11:15:02.538364 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6szl" event={"ID":"7767579a-9feb-4bdb-9438-6f4d251eaa97","Type":"ContainerStarted","Data":"1663416bf5db78871570667907dc2d38dc80242a24af8e56ace76965d2fdfaab"} Sep 29 11:15:02 crc kubenswrapper[4779]: I0929 11:15:02.543396 4779 generic.go:334] "Generic (PLEG): container finished" podID="8c3c4c64-343e-4f3d-8196-68cb1b49dee7" containerID="9da20658a23afd4c27ceb6e5845aadb62442822d45b1689b20cb883ca11edb8b" exitCode=0 Sep 29 11:15:02 crc kubenswrapper[4779]: I0929 11:15:02.544662 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" 
event={"ID":"8c3c4c64-343e-4f3d-8196-68cb1b49dee7","Type":"ContainerDied","Data":"9da20658a23afd4c27ceb6e5845aadb62442822d45b1689b20cb883ca11edb8b"} Sep 29 11:15:03 crc kubenswrapper[4779]: I0929 11:15:03.554577 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6szl" event={"ID":"7767579a-9feb-4bdb-9438-6f4d251eaa97","Type":"ContainerStarted","Data":"496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d"} Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.030100 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.224377 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-secret-volume\") pod \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.224535 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-config-volume\") pod \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.224813 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jw25\" (UniqueName: \"kubernetes.io/projected/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-kube-api-access-5jw25\") pod \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\" (UID: \"8c3c4c64-343e-4f3d-8196-68cb1b49dee7\") " Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.227720 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-config-volume" (OuterVolumeSpecName: "config-volume") pod "8c3c4c64-343e-4f3d-8196-68cb1b49dee7" (UID: "8c3c4c64-343e-4f3d-8196-68cb1b49dee7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.232016 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-kube-api-access-5jw25" (OuterVolumeSpecName: "kube-api-access-5jw25") pod "8c3c4c64-343e-4f3d-8196-68cb1b49dee7" (UID: "8c3c4c64-343e-4f3d-8196-68cb1b49dee7"). InnerVolumeSpecName "kube-api-access-5jw25". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.242378 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8c3c4c64-343e-4f3d-8196-68cb1b49dee7" (UID: "8c3c4c64-343e-4f3d-8196-68cb1b49dee7"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.327732 4779 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.328443 4779 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.328481 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jw25\" (UniqueName: \"kubernetes.io/projected/8c3c4c64-343e-4f3d-8196-68cb1b49dee7-kube-api-access-5jw25\") on node \"crc\" DevicePath \"\"" Sep 29 11:15:04 crc kubenswrapper[4779]: E0929 11:15:04.478105 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7767579a_9feb_4bdb_9438_6f4d251eaa97.slice/crio-conmon-496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d.scope\": RecentStats: unable to find data in memory cache]" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.570641 4779 generic.go:334] "Generic (PLEG): container finished" podID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerID="496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d" exitCode=0 Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.570957 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6szl" event={"ID":"7767579a-9feb-4bdb-9438-6f4d251eaa97","Type":"ContainerDied","Data":"496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d"} Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.576050 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.577011 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319075-fn2fg" event={"ID":"8c3c4c64-343e-4f3d-8196-68cb1b49dee7","Type":"ContainerDied","Data":"e6939ff4711db2357e315872202168a524a5963d912536574763b52d68b81bca"} Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.577119 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6939ff4711db2357e315872202168a524a5963d912536574763b52d68b81bca" Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.640297 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"] Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.649940 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319030-krpvl"] Sep 29 11:15:04 crc kubenswrapper[4779]: I0929 11:15:04.726791 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcca22c5-a7c4-4ed8-83e9-a9805d21b170" path="/var/lib/kubelet/pods/fcca22c5-a7c4-4ed8-83e9-a9805d21b170/volumes" Sep 29 11:15:07 crc kubenswrapper[4779]: I0929 11:15:07.606076 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6szl" event={"ID":"7767579a-9feb-4bdb-9438-6f4d251eaa97","Type":"ContainerStarted","Data":"386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870"} Sep 29 11:15:07 crc kubenswrapper[4779]: I0929 11:15:07.634711 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v6szl" podStartSLOduration=3.5951447 podStartE2EDuration="7.63468694s" podCreationTimestamp="2025-09-29 11:15:00 +0000 UTC" firstStartedPulling="2025-09-29 11:15:02.543016082 +0000 UTC m=+6334.524339986" lastFinishedPulling="2025-09-29 11:15:06.582558322 +0000 UTC m=+6338.563882226" observedRunningTime="2025-09-29 11:15:07.627982495 +0000 UTC m=+6339.609306419" watchObservedRunningTime="2025-09-29 11:15:07.63468694 +0000 UTC m=+6339.616010834" Sep 29 11:15:10 crc kubenswrapper[4779]: I0929 11:15:10.999304 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:11 crc kubenswrapper[4779]: I0929 11:15:10.999886 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:11 crc kubenswrapper[4779]: I0929 11:15:11.050321 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:11 crc kubenswrapper[4779]: I0929 11:15:11.699484 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:11 crc kubenswrapper[4779]: I0929 11:15:11.753655 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v6szl"] Sep 29 11:15:13 crc kubenswrapper[4779]: I0929 11:15:13.663088 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v6szl" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerName="registry-server" containerID="cri-o://386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870" gracePeriod=2 Sep 29 11:15:14 crc 
kubenswrapper[4779]: I0929 11:15:14.186321 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.342737 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-utilities\") pod \"7767579a-9feb-4bdb-9438-6f4d251eaa97\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.342860 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8q9bb\" (UniqueName: \"kubernetes.io/projected/7767579a-9feb-4bdb-9438-6f4d251eaa97-kube-api-access-8q9bb\") pod \"7767579a-9feb-4bdb-9438-6f4d251eaa97\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.343144 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-catalog-content\") pod \"7767579a-9feb-4bdb-9438-6f4d251eaa97\" (UID: \"7767579a-9feb-4bdb-9438-6f4d251eaa97\") " Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.343982 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-utilities" (OuterVolumeSpecName: "utilities") pod "7767579a-9feb-4bdb-9438-6f4d251eaa97" (UID: "7767579a-9feb-4bdb-9438-6f4d251eaa97"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.349836 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7767579a-9feb-4bdb-9438-6f4d251eaa97-kube-api-access-8q9bb" (OuterVolumeSpecName: "kube-api-access-8q9bb") pod "7767579a-9feb-4bdb-9438-6f4d251eaa97" (UID: "7767579a-9feb-4bdb-9438-6f4d251eaa97"). InnerVolumeSpecName "kube-api-access-8q9bb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.428574 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7767579a-9feb-4bdb-9438-6f4d251eaa97" (UID: "7767579a-9feb-4bdb-9438-6f4d251eaa97"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.445955 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.446003 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7767579a-9feb-4bdb-9438-6f4d251eaa97-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.446018 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8q9bb\" (UniqueName: \"kubernetes.io/projected/7767579a-9feb-4bdb-9438-6f4d251eaa97-kube-api-access-8q9bb\") on node \"crc\" DevicePath \"\"" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.679122 4779 generic.go:334] "Generic (PLEG): container finished" podID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerID="386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870" exitCode=0 Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.679184 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6szl" event={"ID":"7767579a-9feb-4bdb-9438-6f4d251eaa97","Type":"ContainerDied","Data":"386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870"} Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.679244 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v6szl" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.679412 4779 scope.go:117] "RemoveContainer" containerID="386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.679395 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v6szl" event={"ID":"7767579a-9feb-4bdb-9438-6f4d251eaa97","Type":"ContainerDied","Data":"1663416bf5db78871570667907dc2d38dc80242a24af8e56ace76965d2fdfaab"} Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.708083 4779 scope.go:117] "RemoveContainer" containerID="496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.753548 4779 scope.go:117] "RemoveContainer" containerID="16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.791493 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v6szl"] Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.816094 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v6szl"] Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.826222 4779 scope.go:117] "RemoveContainer" containerID="386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870" Sep 29 11:15:14 crc kubenswrapper[4779]: E0929 11:15:14.829079 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870\": container with ID starting with 386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870 not found: ID does not exist" containerID="386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.829187 4779 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870"} err="failed to get container status \"386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870\": rpc error: code = NotFound desc = could not find container \"386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870\": container with ID starting with 386ba92b9595ea191b01081809f699e38174993a0d07c92d3dfcfbc27d984870 not found: ID does not exist" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.829245 4779 scope.go:117] "RemoveContainer" containerID="496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d" Sep 29 11:15:14 crc kubenswrapper[4779]: E0929 11:15:14.829608 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d\": container with ID starting with 496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d not found: ID does not exist" containerID="496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.829666 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d"} err="failed to get container status \"496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d\": rpc error: code = NotFound desc = could not find container \"496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d\": container with ID starting with 496705cdeaef7c945a7af3b15bd63668b373d8a4ab7665b4b0eeb6195733dc5d not found: ID does not exist" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.829688 4779 scope.go:117] "RemoveContainer" containerID="16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08" Sep 29 11:15:14 crc kubenswrapper[4779]: E0929 11:15:14.830424 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08\": container with ID starting with 16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08 not found: ID does not exist" containerID="16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08" Sep 29 11:15:14 crc kubenswrapper[4779]: I0929 11:15:14.830453 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08"} err="failed to get container status \"16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08\": rpc error: code = NotFound desc = could not find container \"16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08\": container with ID starting with 16d0d8aa13c82530e01b2d4cad1af3319c11506e0bf5b5d7c94748431e08ae08 not found: ID does not exist" Sep 29 11:15:16 crc kubenswrapper[4779]: I0929 11:15:16.728222 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" path="/var/lib/kubelet/pods/7767579a-9feb-4bdb-9438-6f4d251eaa97/volumes" Sep 29 11:15:51 crc kubenswrapper[4779]: I0929 11:15:51.858418 4779 scope.go:117] "RemoveContainer" containerID="00bd2d70cd8e7fa96b362852fef8b45251c23cacfaf7e75668000639ecd06a06" Sep 29 11:16:31 crc kubenswrapper[4779]: I0929 11:16:31.409635 4779 generic.go:334] "Generic (PLEG): container finished" 
podID="42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" containerID="99bf52254012cebec2ddd98a0b3e64db862af78c9f09b3598ee9e71b47c9e1a4" exitCode=1 Sep 29 11:16:31 crc kubenswrapper[4779]: I0929 11:16:31.409714 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624","Type":"ContainerDied","Data":"99bf52254012cebec2ddd98a0b3e64db862af78c9f09b3598ee9e71b47c9e1a4"} Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.785452 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.931125 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-config-data\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.931242 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-temporary\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.931398 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmmss\" (UniqueName: \"kubernetes.io/projected/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-kube-api-access-nmmss\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.932249 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-workdir\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.932624 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.932854 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-config-data" (OuterVolumeSpecName: "config-data") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.936051 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ca-certs\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.936137 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.936191 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config-secret\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.936262 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.936332 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ssh-key\") pod \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\" (UID: \"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624\") " Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.938744 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.939829 4779 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.939861 4779 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.939874 4779 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.963784 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.964062 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-kube-api-access-nmmss" (OuterVolumeSpecName: "kube-api-access-nmmss") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "kube-api-access-nmmss". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:16:32 crc kubenswrapper[4779]: I0929 11:16:32.990857 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.010520 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.011101 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.013888 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" (UID: "42cdeff3-8be3-4a4c-8c71-67d0f8ec2624"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.040668 4779 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ca-certs\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.040713 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.040727 4779 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.040763 4779 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.040774 4779 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.040783 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmmss\" (UniqueName: \"kubernetes.io/projected/42cdeff3-8be3-4a4c-8c71-67d0f8ec2624-kube-api-access-nmmss\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.062050 4779 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.142109 4779 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.430243 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"42cdeff3-8be3-4a4c-8c71-67d0f8ec2624","Type":"ContainerDied","Data":"79d3fb142b9a90b0c99327a849b21736440f12d54585ec842382679afdafb5c9"} Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.430298 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79d3fb142b9a90b0c99327a849b21736440f12d54585ec842382679afdafb5c9" Sep 29 11:16:33 crc kubenswrapper[4779]: I0929 11:16:33.430384 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.097490 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 11:16:35 crc kubenswrapper[4779]: E0929 11:16:35.098295 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c3c4c64-343e-4f3d-8196-68cb1b49dee7" containerName="collect-profiles" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.098315 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c3c4c64-343e-4f3d-8196-68cb1b49dee7" containerName="collect-profiles" Sep 29 11:16:35 crc kubenswrapper[4779]: E0929 11:16:35.098336 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerName="extract-content" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.098345 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerName="extract-content" Sep 29 11:16:35 crc kubenswrapper[4779]: E0929 11:16:35.098385 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerName="extract-utilities" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.098396 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerName="extract-utilities" Sep 29 11:16:35 crc kubenswrapper[4779]: E0929 11:16:35.098421 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" containerName="tempest-tests-tempest-tests-runner" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.098430 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" containerName="tempest-tests-tempest-tests-runner" Sep 29 11:16:35 crc kubenswrapper[4779]: E0929 11:16:35.098450 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerName="registry-server" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.098458 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerName="registry-server" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.098671 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c3c4c64-343e-4f3d-8196-68cb1b49dee7" containerName="collect-profiles" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.098685 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="42cdeff3-8be3-4a4c-8c71-67d0f8ec2624" containerName="tempest-tests-tempest-tests-runner" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.098718 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="7767579a-9feb-4bdb-9438-6f4d251eaa97" containerName="registry-server" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.099705 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.101844 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-xdtbp" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.111037 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.292754 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f9g4\" (UniqueName: \"kubernetes.io/projected/f393804b-3565-4b1a-8be3-e0b38404894e-kube-api-access-4f9g4\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f393804b-3565-4b1a-8be3-e0b38404894e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.293266 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f393804b-3565-4b1a-8be3-e0b38404894e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.395520 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f9g4\" (UniqueName: \"kubernetes.io/projected/f393804b-3565-4b1a-8be3-e0b38404894e-kube-api-access-4f9g4\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f393804b-3565-4b1a-8be3-e0b38404894e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.395587 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f393804b-3565-4b1a-8be3-e0b38404894e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.396231 4779 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f393804b-3565-4b1a-8be3-e0b38404894e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.417713 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f9g4\" (UniqueName: \"kubernetes.io/projected/f393804b-3565-4b1a-8be3-e0b38404894e-kube-api-access-4f9g4\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f393804b-3565-4b1a-8be3-e0b38404894e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:35 crc kubenswrapper[4779]: I0929 11:16:35.430595 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f393804b-3565-4b1a-8be3-e0b38404894e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:35 crc 
kubenswrapper[4779]: I0929 11:16:35.728353 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 11:16:36 crc kubenswrapper[4779]: I0929 11:16:36.206322 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 11:16:36 crc kubenswrapper[4779]: I0929 11:16:36.211457 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 11:16:36 crc kubenswrapper[4779]: I0929 11:16:36.459387 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f393804b-3565-4b1a-8be3-e0b38404894e","Type":"ContainerStarted","Data":"8ee90b2a11f0f8801dc5edc8746623080e4a1ca850342df26a2014571b9c99e5"} Sep 29 11:16:37 crc kubenswrapper[4779]: I0929 11:16:37.474344 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f393804b-3565-4b1a-8be3-e0b38404894e","Type":"ContainerStarted","Data":"b4abe7a2d26450905a965a8c65f047c2b49f18231d8295bc89f32361da3cebb0"} Sep 29 11:16:37 crc kubenswrapper[4779]: I0929 11:16:37.497442 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.488526168 podStartE2EDuration="2.497418278s" podCreationTimestamp="2025-09-29 11:16:35 +0000 UTC" firstStartedPulling="2025-09-29 11:16:36.211063936 +0000 UTC m=+6428.192387860" lastFinishedPulling="2025-09-29 11:16:37.219956066 +0000 UTC m=+6429.201279970" observedRunningTime="2025-09-29 11:16:37.489444256 +0000 UTC m=+6429.470768160" watchObservedRunningTime="2025-09-29 11:16:37.497418278 +0000 UTC m=+6429.478742182" Sep 29 11:17:16 crc kubenswrapper[4779]: I0929 11:17:16.966430 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 11:17:16 crc kubenswrapper[4779]: I0929 11:17:16.967870 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.476343 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pnns5/must-gather-kxppc"] Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.478781 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.482220 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pnns5"/"kube-root-ca.crt" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.482356 4779 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pnns5"/"openshift-service-ca.crt" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.482461 4779 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-pnns5"/"default-dockercfg-kcspt" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.501803 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pnns5/must-gather-kxppc"] Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.531090 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-must-gather-output\") pod \"must-gather-kxppc\" (UID: \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\") " pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.531237 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4w6m\" (UniqueName: \"kubernetes.io/projected/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-kube-api-access-n4w6m\") pod \"must-gather-kxppc\" (UID: \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\") " pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.633679 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4w6m\" (UniqueName: \"kubernetes.io/projected/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-kube-api-access-n4w6m\") pod \"must-gather-kxppc\" (UID: \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\") " pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.634406 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-must-gather-output\") pod \"must-gather-kxppc\" (UID: \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\") " pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.634491 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-must-gather-output\") pod \"must-gather-kxppc\" (UID: \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\") " pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.667753 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4w6m\" (UniqueName: \"kubernetes.io/projected/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-kube-api-access-n4w6m\") pod \"must-gather-kxppc\" (UID: \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\") " pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:17:24 crc kubenswrapper[4779]: I0929 11:17:24.802773 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:17:25 crc kubenswrapper[4779]: I0929 11:17:25.328563 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pnns5/must-gather-kxppc"] Sep 29 11:17:26 crc kubenswrapper[4779]: I0929 11:17:26.004254 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/must-gather-kxppc" event={"ID":"a8ec40cf-67b3-436a-a55b-1f7472e8ad29","Type":"ContainerStarted","Data":"0eb61afd810fe9f3c22a0fda9ec708f132ae748fcfa5fe25d4fd10db41c80484"} Sep 29 11:17:32 crc kubenswrapper[4779]: I0929 11:17:32.069824 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/must-gather-kxppc" event={"ID":"a8ec40cf-67b3-436a-a55b-1f7472e8ad29","Type":"ContainerStarted","Data":"b9d95caf1effffa088ea89ddaf71dd28d40079b8b1b1c856c198b36cefffa9e7"} Sep 29 11:17:32 crc kubenswrapper[4779]: I0929 11:17:32.070404 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/must-gather-kxppc" event={"ID":"a8ec40cf-67b3-436a-a55b-1f7472e8ad29","Type":"ContainerStarted","Data":"aafadc2d00f2e109d652f3c98dcec7dbe9eac3221b35fbdf3101631813d77986"} Sep 29 11:17:32 crc kubenswrapper[4779]: I0929 11:17:32.088759 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pnns5/must-gather-kxppc" podStartSLOduration=1.919078684 podStartE2EDuration="8.088742123s" podCreationTimestamp="2025-09-29 11:17:24 +0000 UTC" firstStartedPulling="2025-09-29 11:17:25.335483655 +0000 UTC m=+6477.316807559" lastFinishedPulling="2025-09-29 11:17:31.505147084 +0000 UTC m=+6483.486470998" observedRunningTime="2025-09-29 11:17:32.085635562 +0000 UTC m=+6484.066959466" watchObservedRunningTime="2025-09-29 11:17:32.088742123 +0000 UTC m=+6484.070066027" Sep 29 11:17:36 crc kubenswrapper[4779]: I0929 11:17:36.799282 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pnns5/crc-debug-2d7c5"] Sep 29 11:17:36 crc kubenswrapper[4779]: I0929 11:17:36.802042 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:17:36 crc kubenswrapper[4779]: I0929 11:17:36.924247 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz2lg\" (UniqueName: \"kubernetes.io/projected/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-kube-api-access-hz2lg\") pod \"crc-debug-2d7c5\" (UID: \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\") " pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:17:36 crc kubenswrapper[4779]: I0929 11:17:36.924489 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-host\") pod \"crc-debug-2d7c5\" (UID: \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\") " pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:17:37 crc kubenswrapper[4779]: I0929 11:17:37.027107 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz2lg\" (UniqueName: \"kubernetes.io/projected/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-kube-api-access-hz2lg\") pod \"crc-debug-2d7c5\" (UID: \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\") " pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:17:37 crc kubenswrapper[4779]: I0929 11:17:37.027243 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-host\") pod \"crc-debug-2d7c5\" (UID: \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\") " pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:17:37 crc kubenswrapper[4779]: I0929 11:17:37.027389 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-host\") pod \"crc-debug-2d7c5\" (UID: \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\") " pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:17:37 crc kubenswrapper[4779]: I0929 11:17:37.057478 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz2lg\" (UniqueName: \"kubernetes.io/projected/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-kube-api-access-hz2lg\") pod \"crc-debug-2d7c5\" (UID: \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\") " pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:17:37 crc kubenswrapper[4779]: I0929 11:17:37.122460 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:17:38 crc kubenswrapper[4779]: I0929 11:17:38.131305 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-2d7c5" event={"ID":"cdcadbcc-d21a-41c7-aab2-fabb14e9042b","Type":"ContainerStarted","Data":"ee0c13d578b3f16bfcb2915fcd04fd31b2fd0a7ffe6d77afc7fffb9c9a8c989d"} Sep 29 11:17:46 crc kubenswrapper[4779]: I0929 11:17:46.966025 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 11:17:46 crc kubenswrapper[4779]: I0929 11:17:46.966581 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:17:48 crc kubenswrapper[4779]: I0929 11:17:48.236207 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-2d7c5" event={"ID":"cdcadbcc-d21a-41c7-aab2-fabb14e9042b","Type":"ContainerStarted","Data":"f0a3a6dc8539c36e477671f37ed07802b242defccf3926fb6111fe40bd53ec80"} Sep 29 11:17:48 crc kubenswrapper[4779]: I0929 11:17:48.256866 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pnns5/crc-debug-2d7c5" podStartSLOduration=1.525870061 podStartE2EDuration="12.256838688s" podCreationTimestamp="2025-09-29 11:17:36 +0000 UTC" firstStartedPulling="2025-09-29 11:17:37.162390327 +0000 UTC m=+6489.143714231" lastFinishedPulling="2025-09-29 11:17:47.893358954 +0000 UTC m=+6499.874682858" observedRunningTime="2025-09-29 11:17:48.250780082 +0000 UTC m=+6500.232103996" watchObservedRunningTime="2025-09-29 11:17:48.256838688 +0000 UTC m=+6500.238162592" Sep 29 11:18:16 crc kubenswrapper[4779]: I0929 11:18:16.966994 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 11:18:16 crc kubenswrapper[4779]: I0929 11:18:16.967542 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:18:16 crc kubenswrapper[4779]: I0929 11:18:16.967596 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 11:18:16 crc kubenswrapper[4779]: I0929 11:18:16.968236 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a03ce0ceb551acf7dbaa770a33c6c1fb633aca21ecf8c2b971b784f7bf5f2c9"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 11:18:16 crc kubenswrapper[4779]: I0929 11:18:16.968308 4779 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" containerID="cri-o://1a03ce0ceb551acf7dbaa770a33c6c1fb633aca21ecf8c2b971b784f7bf5f2c9" gracePeriod=600 Sep 29 11:18:17 crc kubenswrapper[4779]: I0929 11:18:17.558426 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="1a03ce0ceb551acf7dbaa770a33c6c1fb633aca21ecf8c2b971b784f7bf5f2c9" exitCode=0 Sep 29 11:18:17 crc kubenswrapper[4779]: I0929 11:18:17.558502 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"1a03ce0ceb551acf7dbaa770a33c6c1fb633aca21ecf8c2b971b784f7bf5f2c9"} Sep 29 11:18:17 crc kubenswrapper[4779]: I0929 11:18:17.558777 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364"} Sep 29 11:18:17 crc kubenswrapper[4779]: I0929 11:18:17.558802 4779 scope.go:117] "RemoveContainer" containerID="0d6f68afc54f18fe2602e277e99ec918234448a85088f1d19d904e85cb8c611c" Sep 29 11:19:06 crc kubenswrapper[4779]: I0929 11:19:06.694097 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7fd47db598-vbs8p_ca54737d-5152-4b83-9513-602951da4cfe/barbican-api/0.log" Sep 29 11:19:06 crc kubenswrapper[4779]: I0929 11:19:06.873730 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7fd47db598-vbs8p_ca54737d-5152-4b83-9513-602951da4cfe/barbican-api-log/0.log" Sep 29 11:19:07 crc kubenswrapper[4779]: I0929 11:19:07.297520 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5565dc4c94-dr8wr_8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c/barbican-keystone-listener/0.log" Sep 29 11:19:07 crc kubenswrapper[4779]: I0929 11:19:07.587239 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5565dc4c94-dr8wr_8fdf2fc8-ee7f-4a6b-b9c8-b90e96f8f75c/barbican-keystone-listener-log/0.log" Sep 29 11:19:07 crc kubenswrapper[4779]: I0929 11:19:07.842147 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cc6cc97c7-9ffpm_e81a45ce-9e33-41f4-9cf2-aa44749e66e3/barbican-worker/0.log" Sep 29 11:19:08 crc kubenswrapper[4779]: I0929 11:19:08.041258 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cc6cc97c7-9ffpm_e81a45ce-9e33-41f4-9cf2-aa44749e66e3/barbican-worker-log/0.log" Sep 29 11:19:08 crc kubenswrapper[4779]: I0929 11:19:08.239173 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-fgwfl_19b930c0-52e1-4476-a69e-289a0c246e21/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:08 crc kubenswrapper[4779]: I0929 11:19:08.534528 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_dabe9a97-d078-4804-bbec-3942030e1914/ceilometer-central-agent/0.log" Sep 29 11:19:08 crc kubenswrapper[4779]: I0929 11:19:08.542341 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_dabe9a97-d078-4804-bbec-3942030e1914/ceilometer-notification-agent/0.log" Sep 29 11:19:08 crc kubenswrapper[4779]: I0929 11:19:08.697442 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_dabe9a97-d078-4804-bbec-3942030e1914/sg-core/0.log" Sep 29 11:19:08 crc kubenswrapper[4779]: I0929 11:19:08.750851 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_dabe9a97-d078-4804-bbec-3942030e1914/proxy-httpd/0.log" Sep 29 11:19:08 crc kubenswrapper[4779]: I0929 11:19:08.961577 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-w88p4_b564f44b-9cb9-4ce1-894d-2c88056ae4ee/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:09 crc kubenswrapper[4779]: I0929 11:19:09.138161 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-4cfqm_d3187122-fb81-4acf-bcf4-5c45896ea5de/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:09 crc kubenswrapper[4779]: I0929 11:19:09.617690 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_901ac146-e211-4724-a8f5-2c4f9c966bba/cinder-api-log/0.log" Sep 29 11:19:10 crc kubenswrapper[4779]: I0929 11:19:10.447177 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_901ac146-e211-4724-a8f5-2c4f9c966bba/cinder-api/0.log" Sep 29 11:19:10 crc kubenswrapper[4779]: I0929 11:19:10.671549 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2/probe/0.log" Sep 29 11:19:11 crc kubenswrapper[4779]: I0929 11:19:11.220449 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_d65424b8-83a8-44d3-8a99-c2dfdd5de5c9/cinder-scheduler/0.log" Sep 29 11:19:11 crc kubenswrapper[4779]: I0929 11:19:11.269262 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_405a3bd2-e5cd-41e6-85b6-48ef8e8c47f2/cinder-backup/0.log" Sep 29 11:19:11 crc kubenswrapper[4779]: I0929 11:19:11.509315 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_d65424b8-83a8-44d3-8a99-c2dfdd5de5c9/probe/0.log" Sep 29 11:19:11 crc kubenswrapper[4779]: I0929 11:19:11.760356 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_c72961de-9fe8-4a03-b6df-d12de65986f1/probe/0.log" Sep 29 11:19:12 crc kubenswrapper[4779]: I0929 11:19:12.014588 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_c72961de-9fe8-4a03-b6df-d12de65986f1/cinder-volume/0.log" Sep 29 11:19:12 crc kubenswrapper[4779]: I0929 11:19:12.082225 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume2-0_79feadef-f3f0-4d05-94ce-9edadb69bb6e/probe/0.log" Sep 29 11:19:12 crc kubenswrapper[4779]: I0929 11:19:12.328962 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-fnvx4_d1ac3840-3da4-4a8a-bc05-99181c55d968/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:12 crc kubenswrapper[4779]: I0929 11:19:12.341780 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume2-0_79feadef-f3f0-4d05-94ce-9edadb69bb6e/cinder-volume/0.log" Sep 29 11:19:12 crc kubenswrapper[4779]: I0929 
11:19:12.591428 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-ftj8d_f8ccea64-9a22-4841-93a1-8763ed086d3b/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:12 crc kubenswrapper[4779]: I0929 11:19:12.750657 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-84f9969b4f-s8nfv_ab068d40-9967-4674-a654-b0b2dbcfd76f/init/0.log" Sep 29 11:19:12 crc kubenswrapper[4779]: I0929 11:19:12.925617 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-84f9969b4f-s8nfv_ab068d40-9967-4674-a654-b0b2dbcfd76f/init/0.log" Sep 29 11:19:13 crc kubenswrapper[4779]: I0929 11:19:13.223167 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78/glance-httpd/0.log" Sep 29 11:19:13 crc kubenswrapper[4779]: I0929 11:19:13.281064 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ae44fb6f-4ba0-4e8c-801e-ce14e3c69a78/glance-log/0.log" Sep 29 11:19:13 crc kubenswrapper[4779]: I0929 11:19:13.488216 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-84f9969b4f-s8nfv_ab068d40-9967-4674-a654-b0b2dbcfd76f/dnsmasq-dns/0.log" Sep 29 11:19:13 crc kubenswrapper[4779]: I0929 11:19:13.540456 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_04247105-501f-4d3a-b624-7e1d64014fe8/glance-httpd/0.log" Sep 29 11:19:13 crc kubenswrapper[4779]: I0929 11:19:13.669031 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_04247105-501f-4d3a-b624-7e1d64014fe8/glance-log/0.log" Sep 29 11:19:13 crc kubenswrapper[4779]: I0929 11:19:13.832909 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7d46749f98-lswks_4e4ac544-73b5-4ec4-975d-83eac168a331/horizon/0.log" Sep 29 11:19:14 crc kubenswrapper[4779]: I0929 11:19:14.074891 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-gsz77_02785720-b248-42e2-93a3-ccda8cdb2950/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:14 crc kubenswrapper[4779]: I0929 11:19:14.339032 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-t5q82_fe694ea6-6566-4145-8470-70caa70638d5/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:14 crc kubenswrapper[4779]: I0929 11:19:14.369843 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7d46749f98-lswks_4e4ac544-73b5-4ec4-975d-83eac168a331/horizon-log/0.log" Sep 29 11:19:14 crc kubenswrapper[4779]: I0929 11:19:14.803158 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319001-ssfsw_cc77a3e0-fac1-4945-a706-d4d9fc0a209f/keystone-cron/0.log" Sep 29 11:19:15 crc kubenswrapper[4779]: I0929 11:19:15.020488 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319061-ldscx_ce5ee5d0-93f2-4a7d-b8b1-b0614e83526d/keystone-cron/0.log" Sep 29 11:19:15 crc kubenswrapper[4779]: I0929 11:19:15.174443 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-864d875667-zb59s_ec529845-7c69-4888-9bb2-d9cd15db73ed/keystone-api/0.log" Sep 29 11:19:15 crc kubenswrapper[4779]: I0929 11:19:15.295038 4779 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_kube-state-metrics-0_0064d445-dfc2-4f0f-b4ae-26ebc2a1d3ea/kube-state-metrics/0.log" Sep 29 11:19:15 crc kubenswrapper[4779]: I0929 11:19:15.503514 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-ch2sw_b328d4c8-0d40-4d1c-ade1-469e292e6d0e/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:16 crc kubenswrapper[4779]: I0929 11:19:16.334866 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-68487ccd79-zvm96_cc248e32-59fd-44c1-ab42-636f4a81a203/neutron-api/0.log" Sep 29 11:19:16 crc kubenswrapper[4779]: I0929 11:19:16.371179 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-68487ccd79-zvm96_cc248e32-59fd-44c1-ab42-636f4a81a203/neutron-httpd/0.log" Sep 29 11:19:16 crc kubenswrapper[4779]: I0929 11:19:16.841272 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-pb5p7_f8a009ad-3b49-4843-8096-74a433d5d166/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:19 crc kubenswrapper[4779]: I0929 11:19:19.319379 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_01d9cfc0-d700-4859-84e9-66ebd1047e97/nova-api-log/0.log" Sep 29 11:19:19 crc kubenswrapper[4779]: I0929 11:19:19.557933 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_01d9cfc0-d700-4859-84e9-66ebd1047e97/nova-api-api/0.log" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.086916 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_a4cc3c62-fa0a-4be5-b2e9-2ae9df18138e/nova-cell0-conductor-conductor/0.log" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.129290 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kgqql"] Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.134367 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.170072 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kgqql"] Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.194048 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-utilities\") pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.194187 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-catalog-content\") pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.194226 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsrtf\" (UniqueName: \"kubernetes.io/projected/f3ebc485-8e63-4193-99a1-de05253ee7a5-kube-api-access-qsrtf\") pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.258738 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_9ae237e7-f974-47bd-803d-05af4bb116f1/nova-cell1-conductor-conductor/0.log" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.298459 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-utilities\") pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.298547 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-catalog-content\") pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.298576 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsrtf\" (UniqueName: \"kubernetes.io/projected/f3ebc485-8e63-4193-99a1-de05253ee7a5-kube-api-access-qsrtf\") pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.299609 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-utilities\") pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.299920 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-catalog-content\") 
pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.338183 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsrtf\" (UniqueName: \"kubernetes.io/projected/f3ebc485-8e63-4193-99a1-de05253ee7a5-kube-api-access-qsrtf\") pod \"certified-operators-kgqql\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.474872 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:20 crc kubenswrapper[4779]: I0929 11:19:20.750019 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_769b375a-f6f8-4343-897c-622d8a7306d0/nova-cell1-novncproxy-novncproxy/0.log" Sep 29 11:19:21 crc kubenswrapper[4779]: I0929 11:19:21.101695 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kgqql"] Sep 29 11:19:21 crc kubenswrapper[4779]: I0929 11:19:21.302653 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgqql" event={"ID":"f3ebc485-8e63-4193-99a1-de05253ee7a5","Type":"ContainerStarted","Data":"7a6b256b5bf4515f6ff779f677aeb6baa0287f6a3dbc431001d5252fa7d9335f"} Sep 29 11:19:21 crc kubenswrapper[4779]: I0929 11:19:21.410440 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-75x7c_f1c042f0-5d85-45db-bd0d-42a9ab1dfcdd/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:21 crc kubenswrapper[4779]: E0929 11:19:21.625228 4779 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3ebc485_8e63_4193_99a1_de05253ee7a5.slice/crio-681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3ebc485_8e63_4193_99a1_de05253ee7a5.slice/crio-conmon-681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd.scope\": RecentStats: unable to find data in memory cache]" Sep 29 11:19:21 crc kubenswrapper[4779]: I0929 11:19:21.760486 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_087223b2-e95e-4876-8bb1-d9fa4cab5575/nova-metadata-log/0.log" Sep 29 11:19:22 crc kubenswrapper[4779]: I0929 11:19:22.319091 4779 generic.go:334] "Generic (PLEG): container finished" podID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerID="681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd" exitCode=0 Sep 29 11:19:22 crc kubenswrapper[4779]: I0929 11:19:22.319151 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgqql" event={"ID":"f3ebc485-8e63-4193-99a1-de05253ee7a5","Type":"ContainerDied","Data":"681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd"} Sep 29 11:19:22 crc kubenswrapper[4779]: I0929 11:19:22.397324 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00e4a196-0951-4c3a-9a1e-65e24cf2e6a0/mysql-bootstrap/0.log" Sep 29 11:19:22 crc kubenswrapper[4779]: I0929 11:19:22.426080 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-scheduler-0_b34ff414-f728-4ce2-98a5-cc4e005a0a3c/nova-scheduler-scheduler/0.log" Sep 29 11:19:22 crc kubenswrapper[4779]: I0929 11:19:22.661138 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00e4a196-0951-4c3a-9a1e-65e24cf2e6a0/galera/0.log" Sep 29 11:19:22 crc kubenswrapper[4779]: I0929 11:19:22.665752 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00e4a196-0951-4c3a-9a1e-65e24cf2e6a0/mysql-bootstrap/0.log" Sep 29 11:19:22 crc kubenswrapper[4779]: I0929 11:19:22.931651 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bb5f88d2-6663-4ed6-a7a7-93ee500c9edf/mysql-bootstrap/0.log" Sep 29 11:19:23 crc kubenswrapper[4779]: I0929 11:19:23.166240 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bb5f88d2-6663-4ed6-a7a7-93ee500c9edf/mysql-bootstrap/0.log" Sep 29 11:19:23 crc kubenswrapper[4779]: I0929 11:19:23.168100 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bb5f88d2-6663-4ed6-a7a7-93ee500c9edf/galera/0.log" Sep 29 11:19:23 crc kubenswrapper[4779]: I0929 11:19:23.398117 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_ad6adeb1-9606-4cd7-bc31-2ed062ade161/openstackclient/0.log" Sep 29 11:19:23 crc kubenswrapper[4779]: I0929 11:19:23.715960 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-dzjsq_290bdf85-850a-4b79-85f7-dc2e662e0ae9/ovn-controller/0.log" Sep 29 11:19:23 crc kubenswrapper[4779]: I0929 11:19:23.952457 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-l5mxb_6553a386-d57f-483f-98ed-99ba90b035c6/openstack-network-exporter/0.log" Sep 29 11:19:24 crc kubenswrapper[4779]: I0929 11:19:24.250767 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bbznl_a2745c65-9c13-46fa-b3e6-37731ad17208/ovsdb-server-init/0.log" Sep 29 11:19:24 crc kubenswrapper[4779]: I0929 11:19:24.373824 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgqql" event={"ID":"f3ebc485-8e63-4193-99a1-de05253ee7a5","Type":"ContainerStarted","Data":"e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e"} Sep 29 11:19:24 crc kubenswrapper[4779]: I0929 11:19:24.480185 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bbznl_a2745c65-9c13-46fa-b3e6-37731ad17208/ovsdb-server-init/0.log" Sep 29 11:19:24 crc kubenswrapper[4779]: I0929 11:19:24.716870 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bbznl_a2745c65-9c13-46fa-b3e6-37731ad17208/ovsdb-server/0.log" Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.035558 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bbznl_a2745c65-9c13-46fa-b3e6-37731ad17208/ovs-vswitchd/0.log" Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.078789 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_087223b2-e95e-4876-8bb1-d9fa4cab5575/nova-metadata-metadata/0.log" Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.270555 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-bxslq_4069654b-84b4-4049-ad65-a2414376655c/ovn-edpm-deployment-openstack-edpm-ipam/0.log" 
Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.346765 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a/openstack-network-exporter/0.log" Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.392347 4779 generic.go:334] "Generic (PLEG): container finished" podID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerID="e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e" exitCode=0 Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.392410 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgqql" event={"ID":"f3ebc485-8e63-4193-99a1-de05253ee7a5","Type":"ContainerDied","Data":"e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e"} Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.491445 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1f7e1e01-34a6-4fc9-8c73-597aa6dc5a4a/ovn-northd/0.log" Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.652619 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_65145612-cd78-4a1f-84c3-ea831e0c83b0/openstack-network-exporter/0.log" Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.776135 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_65145612-cd78-4a1f-84c3-ea831e0c83b0/ovsdbserver-nb/0.log" Sep 29 11:19:25 crc kubenswrapper[4779]: I0929 11:19:25.885931 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d00ff60f-2316-40a6-a874-c7f4e6506a48/openstack-network-exporter/0.log" Sep 29 11:19:26 crc kubenswrapper[4779]: I0929 11:19:26.077450 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d00ff60f-2316-40a6-a874-c7f4e6506a48/ovsdbserver-sb/0.log" Sep 29 11:19:26 crc kubenswrapper[4779]: I0929 11:19:26.406802 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgqql" event={"ID":"f3ebc485-8e63-4193-99a1-de05253ee7a5","Type":"ContainerStarted","Data":"e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862"} Sep 29 11:19:26 crc kubenswrapper[4779]: I0929 11:19:26.431338 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kgqql" podStartSLOduration=2.987277628 podStartE2EDuration="6.431319853s" podCreationTimestamp="2025-09-29 11:19:20 +0000 UTC" firstStartedPulling="2025-09-29 11:19:22.321743626 +0000 UTC m=+6594.303067530" lastFinishedPulling="2025-09-29 11:19:25.765785851 +0000 UTC m=+6597.747109755" observedRunningTime="2025-09-29 11:19:26.424768673 +0000 UTC m=+6598.406092577" watchObservedRunningTime="2025-09-29 11:19:26.431319853 +0000 UTC m=+6598.412643757" Sep 29 11:19:26 crc kubenswrapper[4779]: I0929 11:19:26.536571 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67b68d58cd-fsdcs_ee014654-7a91-4650-b7d5-76f561da5787/placement-api/0.log" Sep 29 11:19:26 crc kubenswrapper[4779]: I0929 11:19:26.672964 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67b68d58cd-fsdcs_ee014654-7a91-4650-b7d5-76f561da5787/placement-log/0.log" Sep 29 11:19:26 crc kubenswrapper[4779]: I0929 11:19:26.704228 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_65057b07-2357-4d32-bd20-dee421ca5041/init-config-reloader/0.log" Sep 29 11:19:26 crc kubenswrapper[4779]: 
I0929 11:19:26.958062 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_65057b07-2357-4d32-bd20-dee421ca5041/init-config-reloader/0.log" Sep 29 11:19:27 crc kubenswrapper[4779]: I0929 11:19:27.011348 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_65057b07-2357-4d32-bd20-dee421ca5041/config-reloader/0.log" Sep 29 11:19:27 crc kubenswrapper[4779]: I0929 11:19:27.012075 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_65057b07-2357-4d32-bd20-dee421ca5041/prometheus/0.log" Sep 29 11:19:27 crc kubenswrapper[4779]: I0929 11:19:27.218626 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_65057b07-2357-4d32-bd20-dee421ca5041/thanos-sidecar/0.log" Sep 29 11:19:27 crc kubenswrapper[4779]: I0929 11:19:27.377658 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_31656a9a-a9ca-46d0-b682-29e0ddde8ff7/setup-container/0.log" Sep 29 11:19:27 crc kubenswrapper[4779]: I0929 11:19:27.637854 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_31656a9a-a9ca-46d0-b682-29e0ddde8ff7/setup-container/0.log" Sep 29 11:19:27 crc kubenswrapper[4779]: I0929 11:19:27.783236 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_31656a9a-a9ca-46d0-b682-29e0ddde8ff7/rabbitmq/0.log" Sep 29 11:19:27 crc kubenswrapper[4779]: I0929 11:19:27.824047 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_daa6f578-ea44-4555-be0c-e2b8662386f0/setup-container/0.log" Sep 29 11:19:28 crc kubenswrapper[4779]: I0929 11:19:28.066488 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_daa6f578-ea44-4555-be0c-e2b8662386f0/setup-container/0.log" Sep 29 11:19:28 crc kubenswrapper[4779]: I0929 11:19:28.216064 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_daa6f578-ea44-4555-be0c-e2b8662386f0/rabbitmq/0.log" Sep 29 11:19:28 crc kubenswrapper[4779]: I0929 11:19:28.347238 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9a6cc117-b53c-4f46-bbe7-721a5e656cc4/setup-container/0.log" Sep 29 11:19:28 crc kubenswrapper[4779]: I0929 11:19:28.572176 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9a6cc117-b53c-4f46-bbe7-721a5e656cc4/setup-container/0.log" Sep 29 11:19:28 crc kubenswrapper[4779]: I0929 11:19:28.648175 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9a6cc117-b53c-4f46-bbe7-721a5e656cc4/rabbitmq/0.log" Sep 29 11:19:28 crc kubenswrapper[4779]: I0929 11:19:28.901866 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-qmwd7_e9108160-117f-465a-b392-20795018a59d/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:29 crc kubenswrapper[4779]: I0929 11:19:29.004161 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-zt4xx_9d6cbe01-348f-4510-99a9-e5fa8799e2f7/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:29 crc kubenswrapper[4779]: I0929 11:19:29.217179 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-tzhrj_af4a138d-7203-4903-84b8-76586f3d9969/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:29 crc kubenswrapper[4779]: I0929 11:19:29.382079 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-qhfkw_660bebed-55a7-40c0-96d7-244c49608f42/ssh-known-hosts-edpm-deployment/0.log" Sep 29 11:19:29 crc kubenswrapper[4779]: I0929 11:19:29.682690 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-wfpn9_70c4a839-8e6a-43d0-8204-550b989527e9/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:29 crc kubenswrapper[4779]: I0929 11:19:29.884877 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_42cdeff3-8be3-4a4c-8c71-67d0f8ec2624/tempest-tests-tempest-tests-runner/0.log" Sep 29 11:19:29 crc kubenswrapper[4779]: I0929 11:19:29.929632 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f393804b-3565-4b1a-8be3-e0b38404894e/test-operator-logs-container/0.log" Sep 29 11:19:30 crc kubenswrapper[4779]: I0929 11:19:30.138359 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-czz9f_c5a92885-6922-471b-b66a-76e33b9e63a2/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 11:19:30 crc kubenswrapper[4779]: I0929 11:19:30.475011 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:30 crc kubenswrapper[4779]: I0929 11:19:30.475072 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:30 crc kubenswrapper[4779]: I0929 11:19:30.527262 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:31 crc kubenswrapper[4779]: I0929 11:19:31.525278 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:31 crc kubenswrapper[4779]: I0929 11:19:31.579387 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kgqql"] Sep 29 11:19:31 crc kubenswrapper[4779]: I0929 11:19:31.607064 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_af9be7eb-53f5-4de6-a341-4ab3d8168181/watcher-applier/0.log" Sep 29 11:19:31 crc kubenswrapper[4779]: I0929 11:19:31.738227 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_83913750-39b3-4380-a4a1-b4f900d027d7/watcher-api-log/0.log" Sep 29 11:19:33 crc kubenswrapper[4779]: I0929 11:19:33.483055 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kgqql" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerName="registry-server" containerID="cri-o://e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862" gracePeriod=2 Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.047713 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.131124 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-utilities\") pod \"f3ebc485-8e63-4193-99a1-de05253ee7a5\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.131417 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-catalog-content\") pod \"f3ebc485-8e63-4193-99a1-de05253ee7a5\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.132459 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-utilities" (OuterVolumeSpecName: "utilities") pod "f3ebc485-8e63-4193-99a1-de05253ee7a5" (UID: "f3ebc485-8e63-4193-99a1-de05253ee7a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.133590 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsrtf\" (UniqueName: \"kubernetes.io/projected/f3ebc485-8e63-4193-99a1-de05253ee7a5-kube-api-access-qsrtf\") pod \"f3ebc485-8e63-4193-99a1-de05253ee7a5\" (UID: \"f3ebc485-8e63-4193-99a1-de05253ee7a5\") " Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.136186 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.142673 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3ebc485-8e63-4193-99a1-de05253ee7a5-kube-api-access-qsrtf" (OuterVolumeSpecName: "kube-api-access-qsrtf") pod "f3ebc485-8e63-4193-99a1-de05253ee7a5" (UID: "f3ebc485-8e63-4193-99a1-de05253ee7a5"). InnerVolumeSpecName "kube-api-access-qsrtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.221097 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3ebc485-8e63-4193-99a1-de05253ee7a5" (UID: "f3ebc485-8e63-4193-99a1-de05253ee7a5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.238725 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsrtf\" (UniqueName: \"kubernetes.io/projected/f3ebc485-8e63-4193-99a1-de05253ee7a5-kube-api-access-qsrtf\") on node \"crc\" DevicePath \"\"" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.238769 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3ebc485-8e63-4193-99a1-de05253ee7a5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.502266 4779 generic.go:334] "Generic (PLEG): container finished" podID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerID="e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862" exitCode=0 Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.502350 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgqql" event={"ID":"f3ebc485-8e63-4193-99a1-de05253ee7a5","Type":"ContainerDied","Data":"e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862"} Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.502386 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgqql" event={"ID":"f3ebc485-8e63-4193-99a1-de05253ee7a5","Type":"ContainerDied","Data":"7a6b256b5bf4515f6ff779f677aeb6baa0287f6a3dbc431001d5252fa7d9335f"} Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.502405 4779 scope.go:117] "RemoveContainer" containerID="e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.502580 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kgqql" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.535863 4779 scope.go:117] "RemoveContainer" containerID="e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.552843 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kgqql"] Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.568682 4779 scope.go:117] "RemoveContainer" containerID="681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.604554 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kgqql"] Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.626026 4779 scope.go:117] "RemoveContainer" containerID="e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862" Sep 29 11:19:34 crc kubenswrapper[4779]: E0929 11:19:34.628824 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862\": container with ID starting with e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862 not found: ID does not exist" containerID="e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.629101 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862"} err="failed to get container status \"e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862\": rpc error: code = NotFound desc = could not find container \"e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862\": container with ID starting with e9f4c185b542f6f6e5c2b00448117f62ffa59721b7b4979250dde5a5ea924862 not found: ID does not exist" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.629160 4779 scope.go:117] "RemoveContainer" containerID="e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e" Sep 29 11:19:34 crc kubenswrapper[4779]: E0929 11:19:34.629737 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e\": container with ID starting with e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e not found: ID does not exist" containerID="e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.629787 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e"} err="failed to get container status \"e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e\": rpc error: code = NotFound desc = could not find container \"e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e\": container with ID starting with e11ad63fc5fb28bf4b06883575583f35c33f5c95c4f936a7b2e8605be4a4d53e not found: ID does not exist" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.629823 4779 scope.go:117] "RemoveContainer" containerID="681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd" Sep 29 11:19:34 crc kubenswrapper[4779]: E0929 11:19:34.631285 4779 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd\": container with ID starting with 681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd not found: ID does not exist" containerID="681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.631318 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd"} err="failed to get container status \"681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd\": rpc error: code = NotFound desc = could not find container \"681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd\": container with ID starting with 681c5ffac9a7eaab2f5dbeb4185122c116bbb0099e5be9f81ef51912389bafbd not found: ID does not exist" Sep 29 11:19:34 crc kubenswrapper[4779]: I0929 11:19:34.731060 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" path="/var/lib/kubelet/pods/f3ebc485-8e63-4193-99a1-de05253ee7a5/volumes" Sep 29 11:19:36 crc kubenswrapper[4779]: I0929 11:19:36.797965 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_8b8a6691-6feb-4a11-acd4-0e02f5454a47/watcher-decision-engine/0.log" Sep 29 11:19:37 crc kubenswrapper[4779]: I0929 11:19:37.789733 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_83913750-39b3-4380-a4a1-b4f900d027d7/watcher-api/0.log" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.094205 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qm6zb"] Sep 29 11:19:41 crc kubenswrapper[4779]: E0929 11:19:41.095479 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerName="extract-content" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.095501 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerName="extract-content" Sep 29 11:19:41 crc kubenswrapper[4779]: E0929 11:19:41.095536 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerName="registry-server" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.095544 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerName="registry-server" Sep 29 11:19:41 crc kubenswrapper[4779]: E0929 11:19:41.095573 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerName="extract-utilities" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.095579 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerName="extract-utilities" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.095873 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3ebc485-8e63-4193-99a1-de05253ee7a5" containerName="registry-server" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.112544 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.128618 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm6zb"] Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.210930 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-utilities\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.211093 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-catalog-content\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.211223 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t8sp\" (UniqueName: \"kubernetes.io/projected/124a95e5-75ea-4a0c-937e-9ef8a300caf3-kube-api-access-7t8sp\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.312754 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-utilities\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.312856 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-catalog-content\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.312923 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t8sp\" (UniqueName: \"kubernetes.io/projected/124a95e5-75ea-4a0c-937e-9ef8a300caf3-kube-api-access-7t8sp\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.313352 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-utilities\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.315256 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-catalog-content\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.351663 4779 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7t8sp\" (UniqueName: \"kubernetes.io/projected/124a95e5-75ea-4a0c-937e-9ef8a300caf3-kube-api-access-7t8sp\") pod \"redhat-marketplace-qm6zb\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.439130 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:41 crc kubenswrapper[4779]: I0929 11:19:41.996765 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm6zb"] Sep 29 11:19:42 crc kubenswrapper[4779]: I0929 11:19:42.596698 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_75fdeaf0-3dbf-4249-b03d-8b59289a2d58/memcached/0.log" Sep 29 11:19:42 crc kubenswrapper[4779]: I0929 11:19:42.628977 4779 generic.go:334] "Generic (PLEG): container finished" podID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerID="2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2" exitCode=0 Sep 29 11:19:42 crc kubenswrapper[4779]: I0929 11:19:42.629043 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm6zb" event={"ID":"124a95e5-75ea-4a0c-937e-9ef8a300caf3","Type":"ContainerDied","Data":"2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2"} Sep 29 11:19:42 crc kubenswrapper[4779]: I0929 11:19:42.629082 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm6zb" event={"ID":"124a95e5-75ea-4a0c-937e-9ef8a300caf3","Type":"ContainerStarted","Data":"1bbb88af9867ecb0fa4c207a9ad233803b4d9ea162437362bcd3972f41b2b087"} Sep 29 11:19:44 crc kubenswrapper[4779]: I0929 11:19:44.650451 4779 generic.go:334] "Generic (PLEG): container finished" podID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerID="876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e" exitCode=0 Sep 29 11:19:44 crc kubenswrapper[4779]: I0929 11:19:44.650512 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm6zb" event={"ID":"124a95e5-75ea-4a0c-937e-9ef8a300caf3","Type":"ContainerDied","Data":"876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e"} Sep 29 11:19:45 crc kubenswrapper[4779]: I0929 11:19:45.667032 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm6zb" event={"ID":"124a95e5-75ea-4a0c-937e-9ef8a300caf3","Type":"ContainerStarted","Data":"794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c"} Sep 29 11:19:45 crc kubenswrapper[4779]: I0929 11:19:45.693483 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qm6zb" podStartSLOduration=2.240996895 podStartE2EDuration="4.693459533s" podCreationTimestamp="2025-09-29 11:19:41 +0000 UTC" firstStartedPulling="2025-09-29 11:19:42.631296047 +0000 UTC m=+6614.612619951" lastFinishedPulling="2025-09-29 11:19:45.083758685 +0000 UTC m=+6617.065082589" observedRunningTime="2025-09-29 11:19:45.690890898 +0000 UTC m=+6617.672214802" watchObservedRunningTime="2025-09-29 11:19:45.693459533 +0000 UTC m=+6617.674783437" Sep 29 11:19:51 crc kubenswrapper[4779]: I0929 11:19:51.439436 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:51 crc kubenswrapper[4779]: I0929 11:19:51.440035 4779 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:51 crc kubenswrapper[4779]: I0929 11:19:51.500534 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:51 crc kubenswrapper[4779]: I0929 11:19:51.800730 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:57 crc kubenswrapper[4779]: I0929 11:19:57.732760 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm6zb"] Sep 29 11:19:57 crc kubenswrapper[4779]: I0929 11:19:57.734753 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qm6zb" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerName="registry-server" containerID="cri-o://794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c" gracePeriod=2 Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.226509 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.323426 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-catalog-content\") pod \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.325237 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t8sp\" (UniqueName: \"kubernetes.io/projected/124a95e5-75ea-4a0c-937e-9ef8a300caf3-kube-api-access-7t8sp\") pod \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.326090 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-utilities\") pod \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\" (UID: \"124a95e5-75ea-4a0c-937e-9ef8a300caf3\") " Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.327752 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-utilities" (OuterVolumeSpecName: "utilities") pod "124a95e5-75ea-4a0c-937e-9ef8a300caf3" (UID: "124a95e5-75ea-4a0c-937e-9ef8a300caf3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.328418 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.337198 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/124a95e5-75ea-4a0c-937e-9ef8a300caf3-kube-api-access-7t8sp" (OuterVolumeSpecName: "kube-api-access-7t8sp") pod "124a95e5-75ea-4a0c-937e-9ef8a300caf3" (UID: "124a95e5-75ea-4a0c-937e-9ef8a300caf3"). InnerVolumeSpecName "kube-api-access-7t8sp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.353123 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "124a95e5-75ea-4a0c-937e-9ef8a300caf3" (UID: "124a95e5-75ea-4a0c-937e-9ef8a300caf3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.431260 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/124a95e5-75ea-4a0c-937e-9ef8a300caf3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.431616 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t8sp\" (UniqueName: \"kubernetes.io/projected/124a95e5-75ea-4a0c-937e-9ef8a300caf3-kube-api-access-7t8sp\") on node \"crc\" DevicePath \"\"" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.816452 4779 generic.go:334] "Generic (PLEG): container finished" podID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerID="794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c" exitCode=0 Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.816496 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm6zb" event={"ID":"124a95e5-75ea-4a0c-937e-9ef8a300caf3","Type":"ContainerDied","Data":"794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c"} Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.816525 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qm6zb" event={"ID":"124a95e5-75ea-4a0c-937e-9ef8a300caf3","Type":"ContainerDied","Data":"1bbb88af9867ecb0fa4c207a9ad233803b4d9ea162437362bcd3972f41b2b087"} Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.816542 4779 scope.go:117] "RemoveContainer" containerID="794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.816662 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qm6zb" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.842975 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm6zb"] Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.843487 4779 scope.go:117] "RemoveContainer" containerID="876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.874164 4779 scope.go:117] "RemoveContainer" containerID="2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.876054 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qm6zb"] Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.922891 4779 scope.go:117] "RemoveContainer" containerID="794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c" Sep 29 11:19:58 crc kubenswrapper[4779]: E0929 11:19:58.923488 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c\": container with ID starting with 794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c not found: ID does not exist" containerID="794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.923519 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c"} err="failed to get container status \"794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c\": rpc error: code = NotFound desc = could not find container \"794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c\": container with ID starting with 794dac6e9c01d6298d3210de60e5d3e5c25f70efbd17783117ae9095d4aa3b3c not found: ID does not exist" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.923546 4779 scope.go:117] "RemoveContainer" containerID="876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e" Sep 29 11:19:58 crc kubenswrapper[4779]: E0929 11:19:58.923990 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e\": container with ID starting with 876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e not found: ID does not exist" containerID="876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.924041 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e"} err="failed to get container status \"876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e\": rpc error: code = NotFound desc = could not find container \"876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e\": container with ID starting with 876b3f1397f570efce8c515c78fdb245596b0a85ff9433e5385caafb78c3db2e not found: ID does not exist" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.924072 4779 scope.go:117] "RemoveContainer" containerID="2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2" Sep 29 11:19:58 crc kubenswrapper[4779]: E0929 11:19:58.924437 4779 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2\": container with ID starting with 2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2 not found: ID does not exist" containerID="2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2" Sep 29 11:19:58 crc kubenswrapper[4779]: I0929 11:19:58.924467 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2"} err="failed to get container status \"2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2\": rpc error: code = NotFound desc = could not find container \"2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2\": container with ID starting with 2ecaa06012a77eef6600d47cddd06d5e77b8d622676cd7d1fcc91683325b90c2 not found: ID does not exist" Sep 29 11:20:00 crc kubenswrapper[4779]: I0929 11:20:00.726395 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" path="/var/lib/kubelet/pods/124a95e5-75ea-4a0c-937e-9ef8a300caf3/volumes" Sep 29 11:20:12 crc kubenswrapper[4779]: I0929 11:20:12.976383 4779 generic.go:334] "Generic (PLEG): container finished" podID="cdcadbcc-d21a-41c7-aab2-fabb14e9042b" containerID="f0a3a6dc8539c36e477671f37ed07802b242defccf3926fb6111fe40bd53ec80" exitCode=0 Sep 29 11:20:12 crc kubenswrapper[4779]: I0929 11:20:12.976752 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-2d7c5" event={"ID":"cdcadbcc-d21a-41c7-aab2-fabb14e9042b","Type":"ContainerDied","Data":"f0a3a6dc8539c36e477671f37ed07802b242defccf3926fb6111fe40bd53ec80"} Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.091960 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.140447 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pnns5/crc-debug-2d7c5"] Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.149700 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pnns5/crc-debug-2d7c5"] Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.214749 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-host\") pod \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\" (UID: \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\") " Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.214894 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-host" (OuterVolumeSpecName: "host") pod "cdcadbcc-d21a-41c7-aab2-fabb14e9042b" (UID: "cdcadbcc-d21a-41c7-aab2-fabb14e9042b"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.215136 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz2lg\" (UniqueName: \"kubernetes.io/projected/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-kube-api-access-hz2lg\") pod \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\" (UID: \"cdcadbcc-d21a-41c7-aab2-fabb14e9042b\") " Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.215597 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-host\") on node \"crc\" DevicePath \"\"" Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.222072 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-kube-api-access-hz2lg" (OuterVolumeSpecName: "kube-api-access-hz2lg") pod "cdcadbcc-d21a-41c7-aab2-fabb14e9042b" (UID: "cdcadbcc-d21a-41c7-aab2-fabb14e9042b"). InnerVolumeSpecName "kube-api-access-hz2lg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.317508 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz2lg\" (UniqueName: \"kubernetes.io/projected/cdcadbcc-d21a-41c7-aab2-fabb14e9042b-kube-api-access-hz2lg\") on node \"crc\" DevicePath \"\"" Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.727160 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdcadbcc-d21a-41c7-aab2-fabb14e9042b" path="/var/lib/kubelet/pods/cdcadbcc-d21a-41c7-aab2-fabb14e9042b/volumes" Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.997363 4779 scope.go:117] "RemoveContainer" containerID="f0a3a6dc8539c36e477671f37ed07802b242defccf3926fb6111fe40bd53ec80" Sep 29 11:20:14 crc kubenswrapper[4779]: I0929 11:20:14.997417 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-2d7c5" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.354365 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pnns5/crc-debug-94g4c"] Sep 29 11:20:15 crc kubenswrapper[4779]: E0929 11:20:15.355156 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerName="registry-server" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.355171 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerName="registry-server" Sep 29 11:20:15 crc kubenswrapper[4779]: E0929 11:20:15.355195 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerName="extract-content" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.355204 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerName="extract-content" Sep 29 11:20:15 crc kubenswrapper[4779]: E0929 11:20:15.355230 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerName="extract-utilities" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.355237 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerName="extract-utilities" Sep 29 11:20:15 crc kubenswrapper[4779]: E0929 11:20:15.355255 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdcadbcc-d21a-41c7-aab2-fabb14e9042b" containerName="container-00" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.355262 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdcadbcc-d21a-41c7-aab2-fabb14e9042b" containerName="container-00" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.355446 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="124a95e5-75ea-4a0c-937e-9ef8a300caf3" containerName="registry-server" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.355474 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdcadbcc-d21a-41c7-aab2-fabb14e9042b" containerName="container-00" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.356344 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.443281 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/93283d85-f848-49b7-9898-2cff9792212e-host\") pod \"crc-debug-94g4c\" (UID: \"93283d85-f848-49b7-9898-2cff9792212e\") " pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.443434 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z66zq\" (UniqueName: \"kubernetes.io/projected/93283d85-f848-49b7-9898-2cff9792212e-kube-api-access-z66zq\") pod \"crc-debug-94g4c\" (UID: \"93283d85-f848-49b7-9898-2cff9792212e\") " pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.545885 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/93283d85-f848-49b7-9898-2cff9792212e-host\") pod \"crc-debug-94g4c\" (UID: \"93283d85-f848-49b7-9898-2cff9792212e\") " pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.546019 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z66zq\" (UniqueName: \"kubernetes.io/projected/93283d85-f848-49b7-9898-2cff9792212e-kube-api-access-z66zq\") pod \"crc-debug-94g4c\" (UID: \"93283d85-f848-49b7-9898-2cff9792212e\") " pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.546553 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/93283d85-f848-49b7-9898-2cff9792212e-host\") pod \"crc-debug-94g4c\" (UID: \"93283d85-f848-49b7-9898-2cff9792212e\") " pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.573897 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z66zq\" (UniqueName: \"kubernetes.io/projected/93283d85-f848-49b7-9898-2cff9792212e-kube-api-access-z66zq\") pod \"crc-debug-94g4c\" (UID: \"93283d85-f848-49b7-9898-2cff9792212e\") " pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:15 crc kubenswrapper[4779]: I0929 11:20:15.679183 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:16 crc kubenswrapper[4779]: I0929 11:20:16.014530 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-94g4c" event={"ID":"93283d85-f848-49b7-9898-2cff9792212e","Type":"ContainerStarted","Data":"220528a6cb5d0258af78014b1af4861bc26db1cb1839cfce87ae080edfe7bbae"} Sep 29 11:20:16 crc kubenswrapper[4779]: I0929 11:20:16.014866 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-94g4c" event={"ID":"93283d85-f848-49b7-9898-2cff9792212e","Type":"ContainerStarted","Data":"264d0b932d4d47972aacae6053e6968cdc15b12880c0d3bb074870975f722a96"} Sep 29 11:20:16 crc kubenswrapper[4779]: I0929 11:20:16.034117 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pnns5/crc-debug-94g4c" podStartSLOduration=1.034087978 podStartE2EDuration="1.034087978s" podCreationTimestamp="2025-09-29 11:20:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 11:20:16.033305096 +0000 UTC m=+6648.014629000" watchObservedRunningTime="2025-09-29 11:20:16.034087978 +0000 UTC m=+6648.015411882" Sep 29 11:20:17 crc kubenswrapper[4779]: I0929 11:20:17.025836 4779 generic.go:334] "Generic (PLEG): container finished" podID="93283d85-f848-49b7-9898-2cff9792212e" containerID="220528a6cb5d0258af78014b1af4861bc26db1cb1839cfce87ae080edfe7bbae" exitCode=0 Sep 29 11:20:17 crc kubenswrapper[4779]: I0929 11:20:17.025958 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-94g4c" event={"ID":"93283d85-f848-49b7-9898-2cff9792212e","Type":"ContainerDied","Data":"220528a6cb5d0258af78014b1af4861bc26db1cb1839cfce87ae080edfe7bbae"} Sep 29 11:20:18 crc kubenswrapper[4779]: I0929 11:20:18.138839 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:18 crc kubenswrapper[4779]: I0929 11:20:18.314679 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z66zq\" (UniqueName: \"kubernetes.io/projected/93283d85-f848-49b7-9898-2cff9792212e-kube-api-access-z66zq\") pod \"93283d85-f848-49b7-9898-2cff9792212e\" (UID: \"93283d85-f848-49b7-9898-2cff9792212e\") " Sep 29 11:20:18 crc kubenswrapper[4779]: I0929 11:20:18.314855 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/93283d85-f848-49b7-9898-2cff9792212e-host\") pod \"93283d85-f848-49b7-9898-2cff9792212e\" (UID: \"93283d85-f848-49b7-9898-2cff9792212e\") " Sep 29 11:20:18 crc kubenswrapper[4779]: I0929 11:20:18.315022 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/93283d85-f848-49b7-9898-2cff9792212e-host" (OuterVolumeSpecName: "host") pod "93283d85-f848-49b7-9898-2cff9792212e" (UID: "93283d85-f848-49b7-9898-2cff9792212e"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 11:20:18 crc kubenswrapper[4779]: I0929 11:20:18.316461 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/93283d85-f848-49b7-9898-2cff9792212e-host\") on node \"crc\" DevicePath \"\"" Sep 29 11:20:18 crc kubenswrapper[4779]: I0929 11:20:18.322577 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93283d85-f848-49b7-9898-2cff9792212e-kube-api-access-z66zq" (OuterVolumeSpecName: "kube-api-access-z66zq") pod "93283d85-f848-49b7-9898-2cff9792212e" (UID: "93283d85-f848-49b7-9898-2cff9792212e"). InnerVolumeSpecName "kube-api-access-z66zq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:20:18 crc kubenswrapper[4779]: I0929 11:20:18.418400 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z66zq\" (UniqueName: \"kubernetes.io/projected/93283d85-f848-49b7-9898-2cff9792212e-kube-api-access-z66zq\") on node \"crc\" DevicePath \"\"" Sep 29 11:20:19 crc kubenswrapper[4779]: I0929 11:20:19.047872 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-94g4c" event={"ID":"93283d85-f848-49b7-9898-2cff9792212e","Type":"ContainerDied","Data":"264d0b932d4d47972aacae6053e6968cdc15b12880c0d3bb074870975f722a96"} Sep 29 11:20:19 crc kubenswrapper[4779]: I0929 11:20:19.047964 4779 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="264d0b932d4d47972aacae6053e6968cdc15b12880c0d3bb074870975f722a96" Sep 29 11:20:19 crc kubenswrapper[4779]: I0929 11:20:19.047981 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-94g4c" Sep 29 11:20:26 crc kubenswrapper[4779]: I0929 11:20:26.000604 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pnns5/crc-debug-94g4c"] Sep 29 11:20:26 crc kubenswrapper[4779]: I0929 11:20:26.009564 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pnns5/crc-debug-94g4c"] Sep 29 11:20:26 crc kubenswrapper[4779]: I0929 11:20:26.726395 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93283d85-f848-49b7-9898-2cff9792212e" path="/var/lib/kubelet/pods/93283d85-f848-49b7-9898-2cff9792212e/volumes" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.177050 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pnns5/crc-debug-s24nt"] Sep 29 11:20:27 crc kubenswrapper[4779]: E0929 11:20:27.177546 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93283d85-f848-49b7-9898-2cff9792212e" containerName="container-00" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.177561 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="93283d85-f848-49b7-9898-2cff9792212e" containerName="container-00" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.177837 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="93283d85-f848-49b7-9898-2cff9792212e" containerName="container-00" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.178773 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.308591 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-host\") pod \"crc-debug-s24nt\" (UID: \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\") " pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.309078 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj2pb\" (UniqueName: \"kubernetes.io/projected/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-kube-api-access-mj2pb\") pod \"crc-debug-s24nt\" (UID: \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\") " pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.411283 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj2pb\" (UniqueName: \"kubernetes.io/projected/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-kube-api-access-mj2pb\") pod \"crc-debug-s24nt\" (UID: \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\") " pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.411423 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-host\") pod \"crc-debug-s24nt\" (UID: \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\") " pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.411571 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-host\") pod \"crc-debug-s24nt\" (UID: \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\") " pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.430134 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj2pb\" (UniqueName: \"kubernetes.io/projected/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-kube-api-access-mj2pb\") pod \"crc-debug-s24nt\" (UID: \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\") " pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:27 crc kubenswrapper[4779]: I0929 11:20:27.498820 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:28 crc kubenswrapper[4779]: I0929 11:20:28.132803 4779 generic.go:334] "Generic (PLEG): container finished" podID="a9fa8c0c-f285-4ab7-8f91-d584a79180ca" containerID="8f6935715c2472867a9ebcbcc50d0715491e8dd586a75c4368c273cfb0471c09" exitCode=0 Sep 29 11:20:28 crc kubenswrapper[4779]: I0929 11:20:28.132880 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-s24nt" event={"ID":"a9fa8c0c-f285-4ab7-8f91-d584a79180ca","Type":"ContainerDied","Data":"8f6935715c2472867a9ebcbcc50d0715491e8dd586a75c4368c273cfb0471c09"} Sep 29 11:20:28 crc kubenswrapper[4779]: I0929 11:20:28.133159 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/crc-debug-s24nt" event={"ID":"a9fa8c0c-f285-4ab7-8f91-d584a79180ca","Type":"ContainerStarted","Data":"340ed031c0372a2ffae5267b45b7a308d010c245354420ab8f809224eb8234a9"} Sep 29 11:20:28 crc kubenswrapper[4779]: I0929 11:20:28.172088 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pnns5/crc-debug-s24nt"] Sep 29 11:20:28 crc kubenswrapper[4779]: I0929 11:20:28.183734 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pnns5/crc-debug-s24nt"] Sep 29 11:20:29 crc kubenswrapper[4779]: I0929 11:20:29.258143 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:29 crc kubenswrapper[4779]: I0929 11:20:29.350147 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-host\") pod \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\" (UID: \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\") " Sep 29 11:20:29 crc kubenswrapper[4779]: I0929 11:20:29.350200 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mj2pb\" (UniqueName: \"kubernetes.io/projected/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-kube-api-access-mj2pb\") pod \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\" (UID: \"a9fa8c0c-f285-4ab7-8f91-d584a79180ca\") " Sep 29 11:20:29 crc kubenswrapper[4779]: I0929 11:20:29.350310 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-host" (OuterVolumeSpecName: "host") pod "a9fa8c0c-f285-4ab7-8f91-d584a79180ca" (UID: "a9fa8c0c-f285-4ab7-8f91-d584a79180ca"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 11:20:29 crc kubenswrapper[4779]: I0929 11:20:29.350927 4779 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-host\") on node \"crc\" DevicePath \"\"" Sep 29 11:20:29 crc kubenswrapper[4779]: I0929 11:20:29.369770 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-kube-api-access-mj2pb" (OuterVolumeSpecName: "kube-api-access-mj2pb") pod "a9fa8c0c-f285-4ab7-8f91-d584a79180ca" (UID: "a9fa8c0c-f285-4ab7-8f91-d584a79180ca"). InnerVolumeSpecName "kube-api-access-mj2pb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:20:29 crc kubenswrapper[4779]: I0929 11:20:29.452487 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mj2pb\" (UniqueName: \"kubernetes.io/projected/a9fa8c0c-f285-4ab7-8f91-d584a79180ca-kube-api-access-mj2pb\") on node \"crc\" DevicePath \"\"" Sep 29 11:20:29 crc kubenswrapper[4779]: I0929 11:20:29.961483 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6495d75b5-dwvhq_02af413e-f4cb-413e-b029-5410ad2bb9ab/kube-rbac-proxy/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.038372 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6495d75b5-dwvhq_02af413e-f4cb-413e-b029-5410ad2bb9ab/manager/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.174717 4779 scope.go:117] "RemoveContainer" containerID="8f6935715c2472867a9ebcbcc50d0715491e8dd586a75c4368c273cfb0471c09" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.174869 4779 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pnns5/crc-debug-s24nt" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.294484 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl_3c43da05-c4b7-4316-b3c0-464cc862dc74/util/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.455528 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl_3c43da05-c4b7-4316-b3c0-464cc862dc74/util/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.458232 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl_3c43da05-c4b7-4316-b3c0-464cc862dc74/pull/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.487712 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl_3c43da05-c4b7-4316-b3c0-464cc862dc74/pull/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.701556 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl_3c43da05-c4b7-4316-b3c0-464cc862dc74/pull/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.717485 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl_3c43da05-c4b7-4316-b3c0-464cc862dc74/extract/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.728311 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9fa8c0c-f285-4ab7-8f91-d584a79180ca" path="/var/lib/kubelet/pods/a9fa8c0c-f285-4ab7-8f91-d584a79180ca/volumes" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.742494 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bc479d90579b9ea3dd3b3145e413235762d1da82158e3625500bb41a58z8rpl_3c43da05-c4b7-4316-b3c0-464cc862dc74/util/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.891032 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748c574d75-cssz9_a5133455-fda2-4b98-9465-8421aae72e9c/kube-rbac-proxy/0.log" Sep 29 11:20:30 crc kubenswrapper[4779]: I0929 11:20:30.954024 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748c574d75-cssz9_a5133455-fda2-4b98-9465-8421aae72e9c/manager/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.030676 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d74f4d695-jdvhr_01b3e1fb-fbfd-48c6-a89f-cc347df3a24b/kube-rbac-proxy/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.152887 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d74f4d695-jdvhr_01b3e1fb-fbfd-48c6-a89f-cc347df3a24b/manager/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.190190 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67b5d44b7f-6r9tp_3ebfd5a9-ed4f-4589-900e-0c44346fece4/kube-rbac-proxy/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.334753 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67b5d44b7f-6r9tp_3ebfd5a9-ed4f-4589-900e-0c44346fece4/manager/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.409205 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-8ff95898-z2ltj_618315d7-82b7-469d-ba71-a3fbb71ae08c/manager/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.420405 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-8ff95898-z2ltj_618315d7-82b7-469d-ba71-a3fbb71ae08c/kube-rbac-proxy/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.593510 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-695847bc78-gnhwx_c9015e54-4a8c-4d07-ae64-74c380a50a22/kube-rbac-proxy/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.692794 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-695847bc78-gnhwx_c9015e54-4a8c-4d07-ae64-74c380a50a22/manager/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.757565 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-858cd69f49-7pldk_a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c/kube-rbac-proxy/0.log" Sep 29 11:20:31 crc kubenswrapper[4779]: I0929 11:20:31.914314 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9fc8d5567-xxc4j_31462d03-e504-4493-af67-3a5bc9eee5f7/kube-rbac-proxy/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.000126 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-858cd69f49-7pldk_a6be3c3e-f7d0-4134-8fb7-c24ee5c13c2c/manager/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.059050 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9fc8d5567-xxc4j_31462d03-e504-4493-af67-3a5bc9eee5f7/manager/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.204744 4779 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7bf498966c-8fwmh_d2ec9063-4ca4-4280-b98c-198da389f005/kube-rbac-proxy/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.278966 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7bf498966c-8fwmh_d2ec9063-4ca4-4280-b98c-198da389f005/manager/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.392291 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-56cf9c6b99-ll5vc_ad0344fd-e85f-41bc-88da-d38d5ce5add8/kube-rbac-proxy/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.407768 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-56cf9c6b99-ll5vc_ad0344fd-e85f-41bc-88da-d38d5ce5add8/manager/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.505096 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-687b9cf756-d7n87_2bf3c066-8608-4d04-8c2a-7570b23edebe/kube-rbac-proxy/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.850149 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-687b9cf756-d7n87_2bf3c066-8608-4d04-8c2a-7570b23edebe/manager/0.log" Sep 29 11:20:32 crc kubenswrapper[4779]: I0929 11:20:32.934800 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54d766c9f9-lbx77_fa1b2d18-b1e3-410e-864f-84c2d892474a/kube-rbac-proxy/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 11:20:33.043241 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54d766c9f9-lbx77_fa1b2d18-b1e3-410e-864f-84c2d892474a/manager/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 11:20:33.060219 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-b86j5_902219f3-1427-4cf0-9b3f-5879caf3e30d/kube-rbac-proxy/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 11:20:33.240529 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-b86j5_902219f3-1427-4cf0-9b3f-5879caf3e30d/manager/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 11:20:33.301976 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-6rzg7_fd02f073-8084-4117-b444-292a0e41e629/manager/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 11:20:33.350352 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-6rzg7_fd02f073-8084-4117-b444-292a0e41e629/kube-rbac-proxy/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 11:20:33.479257 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-g6nvc_7731544d-11b0-44ef-8a11-163cd6e9cb53/kube-rbac-proxy/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 11:20:33.529382 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-g6nvc_7731544d-11b0-44ef-8a11-163cd6e9cb53/manager/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 
11:20:33.721029 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-84d66d6d97-m2nkr_533fb7be-d888-4af5-8533-4dd61056500c/kube-rbac-proxy/0.log" Sep 29 11:20:33 crc kubenswrapper[4779]: I0929 11:20:33.938503 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6f5d85b94b-vc7f6_6ed08027-8bec-43f6-a452-703eebf6792b/kube-rbac-proxy/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.064438 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-hj95j_8ead15f2-8f4d-49ff-bd74-a535f25bad67/registry-server/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.111691 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6f5d85b94b-vc7f6_6ed08027-8bec-43f6-a452-703eebf6792b/operator/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.274198 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5f95c46c78-7vw66_a89433a3-b8b0-4c71-ad8e-32dd617dc69e/kube-rbac-proxy/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.412308 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5f95c46c78-7vw66_a89433a3-b8b0-4c71-ad8e-32dd617dc69e/manager/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.457519 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-774b97b48-pzd6k_37f9c445-f9e3-47a6-9cc5-63133c13e09f/kube-rbac-proxy/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.603460 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-774b97b48-pzd6k_37f9c445-f9e3-47a6-9cc5-63133c13e09f/manager/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.707494 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-kgz2h_90b2d814-d613-4ee6-bbce-23aad07f8d1c/operator/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.854250 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-2zb2m_2520e0fa-6109-4297-b41b-ff1de862f6a1/manager/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.874152 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-2zb2m_2520e0fa-6109-4297-b41b-ff1de862f6a1/kube-rbac-proxy/0.log" Sep 29 11:20:34 crc kubenswrapper[4779]: I0929 11:20:34.998762 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5bf96cfbc4-8gzc8_7817ca80-efc5-49a0-ba11-daffb918491e/kube-rbac-proxy/0.log" Sep 29 11:20:35 crc kubenswrapper[4779]: I0929 11:20:35.216396 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-28w58_b4e41036-b598-4c28-b813-3fec8f90be39/manager/0.log" Sep 29 11:20:35 crc kubenswrapper[4779]: I0929 11:20:35.231721 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-28w58_b4e41036-b598-4c28-b813-3fec8f90be39/kube-rbac-proxy/0.log" Sep 29 11:20:35 crc kubenswrapper[4779]: 
I0929 11:20:35.365886 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-84d66d6d97-m2nkr_533fb7be-d888-4af5-8533-4dd61056500c/manager/0.log" Sep 29 11:20:35 crc kubenswrapper[4779]: I0929 11:20:35.429052 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5bf96cfbc4-8gzc8_7817ca80-efc5-49a0-ba11-daffb918491e/manager/0.log" Sep 29 11:20:35 crc kubenswrapper[4779]: I0929 11:20:35.445300 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6f4f448b94-qhpm7_84dd49ae-5773-4135-886d-9f3c5a7c7b4b/kube-rbac-proxy/0.log" Sep 29 11:20:35 crc kubenswrapper[4779]: I0929 11:20:35.546884 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6f4f448b94-qhpm7_84dd49ae-5773-4135-886d-9f3c5a7c7b4b/manager/0.log" Sep 29 11:20:46 crc kubenswrapper[4779]: I0929 11:20:46.966058 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 11:20:46 crc kubenswrapper[4779]: I0929 11:20:46.966960 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:20:51 crc kubenswrapper[4779]: I0929 11:20:51.379047 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-bp6p9_b9929052-c8a6-48a6-9520-9b8f4dc396e0/control-plane-machine-set-operator/0.log" Sep 29 11:20:51 crc kubenswrapper[4779]: I0929 11:20:51.546115 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fq82t_edc8c507-aa54-4f0e-b64f-265ff1860ca0/kube-rbac-proxy/0.log" Sep 29 11:20:51 crc kubenswrapper[4779]: I0929 11:20:51.613402 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fq82t_edc8c507-aa54-4f0e-b64f-265ff1860ca0/machine-api-operator/0.log" Sep 29 11:21:04 crc kubenswrapper[4779]: I0929 11:21:04.345149 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-cj4ns_547d471c-18d8-4abb-bbfb-99eb0d042eae/cert-manager-controller/0.log" Sep 29 11:21:04 crc kubenswrapper[4779]: I0929 11:21:04.591334 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-znrc5_8941c68d-b798-425d-add1-c47fb552d2ba/cert-manager-webhook/0.log" Sep 29 11:21:04 crc kubenswrapper[4779]: I0929 11:21:04.592352 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-mqmnc_4087c879-377a-4732-b7a5-67cd5e9552ed/cert-manager-cainjector/0.log" Sep 29 11:21:16 crc kubenswrapper[4779]: I0929 11:21:16.966793 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Sep 29 11:21:16 crc kubenswrapper[4779]: I0929 11:21:16.967357 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:21:17 crc kubenswrapper[4779]: I0929 11:21:17.362571 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-bdlk5_9a02daad-b7b9-4bc9-be5d-0a7e0c6d6c89/nmstate-console-plugin/0.log" Sep 29 11:21:17 crc kubenswrapper[4779]: I0929 11:21:17.541701 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-w26jk_4586a430-c13c-40b4-997f-2999ffbe07df/nmstate-handler/0.log" Sep 29 11:21:17 crc kubenswrapper[4779]: I0929 11:21:17.595166 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-mfvhk_885dfe51-81d6-456c-b9df-ca6983913dd6/nmstate-metrics/0.log" Sep 29 11:21:17 crc kubenswrapper[4779]: I0929 11:21:17.599126 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-mfvhk_885dfe51-81d6-456c-b9df-ca6983913dd6/kube-rbac-proxy/0.log" Sep 29 11:21:17 crc kubenswrapper[4779]: I0929 11:21:17.821251 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-54tvp_91fe307a-bb82-495b-877d-d02d46ddf9b0/nmstate-operator/0.log" Sep 29 11:21:17 crc kubenswrapper[4779]: I0929 11:21:17.844620 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-k8phl_b85e4091-5a43-4976-adb2-b9af4e2cdd06/nmstate-webhook/0.log" Sep 29 11:21:31 crc kubenswrapper[4779]: I0929 11:21:31.810625 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-l6gxt_c10a1808-8263-4c3f-8a8c-dc22de262eee/kube-rbac-proxy/0.log" Sep 29 11:21:31 crc kubenswrapper[4779]: I0929 11:21:31.990787 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-frr-files/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.035078 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-l6gxt_c10a1808-8263-4c3f-8a8c-dc22de262eee/controller/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.245974 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-reloader/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.272989 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-metrics/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.314445 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-reloader/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.319094 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-frr-files/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.512509 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-frr-files/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.553555 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-reloader/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.567650 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-metrics/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.570750 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-metrics/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.727409 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-reloader/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.745506 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-frr-files/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.787551 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/controller/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.798345 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/cp-metrics/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.971143 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/frr-metrics/0.log" Sep 29 11:21:32 crc kubenswrapper[4779]: I0929 11:21:32.971655 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/kube-rbac-proxy/0.log" Sep 29 11:21:33 crc kubenswrapper[4779]: I0929 11:21:33.021161 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/kube-rbac-proxy-frr/0.log" Sep 29 11:21:33 crc kubenswrapper[4779]: I0929 11:21:33.193966 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/reloader/0.log" Sep 29 11:21:33 crc kubenswrapper[4779]: I0929 11:21:33.222280 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-j47xm_148080b1-7775-4b6c-ad08-5de4695051bb/frr-k8s-webhook-server/0.log" Sep 29 11:21:33 crc kubenswrapper[4779]: I0929 11:21:33.464996 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5666db5b47-2v9x9_3386b8db-4ec5-4453-9150-c8697e8b67d4/manager/0.log" Sep 29 11:21:33 crc kubenswrapper[4779]: I0929 11:21:33.627754 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-86cbc99f9-b5b6x_8a35c267-ee63-490b-bf65-f1afb52840cb/webhook-server/0.log" Sep 29 11:21:33 crc kubenswrapper[4779]: I0929 11:21:33.770220 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-n857k_b474caaa-981c-427a-890c-aab91f461a90/kube-rbac-proxy/0.log" Sep 29 11:21:34 crc kubenswrapper[4779]: I0929 11:21:34.514127 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-n857k_b474caaa-981c-427a-890c-aab91f461a90/speaker/0.log" Sep 29 11:21:35 crc kubenswrapper[4779]: I0929 11:21:35.062495 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hs7fd_84543eb2-59d1-4d2c-986c-5836e23a4a68/frr/0.log" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.409499 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl_2b72d279-81fc-4298-bf86-21cdd90a2bb0/util/0.log" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.662370 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl_2b72d279-81fc-4298-bf86-21cdd90a2bb0/util/0.log" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.670105 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl_2b72d279-81fc-4298-bf86-21cdd90a2bb0/pull/0.log" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.741032 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl_2b72d279-81fc-4298-bf86-21cdd90a2bb0/pull/0.log" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.954630 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl_2b72d279-81fc-4298-bf86-21cdd90a2bb0/extract/0.log" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.963704 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl_2b72d279-81fc-4298-bf86-21cdd90a2bb0/util/0.log" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.966023 4779 patch_prober.go:28] interesting pod/machine-config-daemon-5lnlv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.966088 4779 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.966150 4779 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.967117 4779 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364"} pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 11:21:46 crc kubenswrapper[4779]: I0929 11:21:46.967189 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" 
containerName="machine-config-daemon" containerID="cri-o://e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" gracePeriod=600 Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.015472 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bc4h4pl_2b72d279-81fc-4298-bf86-21cdd90a2bb0/pull/0.log" Sep 29 11:21:47 crc kubenswrapper[4779]: E0929 11:21:47.093340 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.168368 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw_4a2dd05c-f021-48d2-9c7a-092ca80dca01/util/0.log" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.374794 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw_4a2dd05c-f021-48d2-9c7a-092ca80dca01/pull/0.log" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.383815 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw_4a2dd05c-f021-48d2-9c7a-092ca80dca01/util/0.log" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.418554 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw_4a2dd05c-f021-48d2-9c7a-092ca80dca01/pull/0.log" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.689088 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw_4a2dd05c-f021-48d2-9c7a-092ca80dca01/util/0.log" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.704963 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw_4a2dd05c-f021-48d2-9c7a-092ca80dca01/extract/0.log" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.801927 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dtdbfw_4a2dd05c-f021-48d2-9c7a-092ca80dca01/pull/0.log" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.972152 4779 generic.go:334] "Generic (PLEG): container finished" podID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" exitCode=0 Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.972477 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerDied","Data":"e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364"} Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.972520 4779 scope.go:117] "RemoveContainer" containerID="1a03ce0ceb551acf7dbaa770a33c6c1fb633aca21ecf8c2b971b784f7bf5f2c9" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 
11:21:47.973076 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:21:47 crc kubenswrapper[4779]: E0929 11:21:47.973386 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:21:47 crc kubenswrapper[4779]: I0929 11:21:47.973426 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jsh2v_ec71db0f-c295-4583-8314-de4043a1ccdf/extract-utilities/0.log" Sep 29 11:21:48 crc kubenswrapper[4779]: I0929 11:21:48.241099 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jsh2v_ec71db0f-c295-4583-8314-de4043a1ccdf/extract-content/0.log" Sep 29 11:21:48 crc kubenswrapper[4779]: I0929 11:21:48.272399 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jsh2v_ec71db0f-c295-4583-8314-de4043a1ccdf/extract-content/0.log" Sep 29 11:21:48 crc kubenswrapper[4779]: I0929 11:21:48.272400 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jsh2v_ec71db0f-c295-4583-8314-de4043a1ccdf/extract-utilities/0.log" Sep 29 11:21:48 crc kubenswrapper[4779]: I0929 11:21:48.547607 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jsh2v_ec71db0f-c295-4583-8314-de4043a1ccdf/extract-utilities/0.log" Sep 29 11:21:48 crc kubenswrapper[4779]: I0929 11:21:48.551392 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jsh2v_ec71db0f-c295-4583-8314-de4043a1ccdf/extract-content/0.log" Sep 29 11:21:48 crc kubenswrapper[4779]: I0929 11:21:48.845722 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-q5kll_b1e9d751-a654-48a0-b585-6865e294ce93/extract-utilities/0.log" Sep 29 11:21:49 crc kubenswrapper[4779]: I0929 11:21:49.241234 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-q5kll_b1e9d751-a654-48a0-b585-6865e294ce93/extract-content/0.log" Sep 29 11:21:49 crc kubenswrapper[4779]: I0929 11:21:49.319660 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-q5kll_b1e9d751-a654-48a0-b585-6865e294ce93/extract-content/0.log" Sep 29 11:21:49 crc kubenswrapper[4779]: I0929 11:21:49.338485 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-q5kll_b1e9d751-a654-48a0-b585-6865e294ce93/extract-utilities/0.log" Sep 29 11:21:49 crc kubenswrapper[4779]: I0929 11:21:49.661474 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-q5kll_b1e9d751-a654-48a0-b585-6865e294ce93/extract-content/0.log" Sep 29 11:21:49 crc kubenswrapper[4779]: I0929 11:21:49.686241 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jsh2v_ec71db0f-c295-4583-8314-de4043a1ccdf/registry-server/0.log" Sep 29 11:21:49 crc kubenswrapper[4779]: I0929 11:21:49.715451 4779 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_community-operators-q5kll_b1e9d751-a654-48a0-b585-6865e294ce93/extract-utilities/0.log" Sep 29 11:21:50 crc kubenswrapper[4779]: I0929 11:21:50.005118 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd_7a6d49df-184b-486e-a7c6-5eb0aecab19f/util/0.log" Sep 29 11:21:50 crc kubenswrapper[4779]: I0929 11:21:50.257175 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd_7a6d49df-184b-486e-a7c6-5eb0aecab19f/pull/0.log" Sep 29 11:21:50 crc kubenswrapper[4779]: I0929 11:21:50.387929 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd_7a6d49df-184b-486e-a7c6-5eb0aecab19f/pull/0.log" Sep 29 11:21:50 crc kubenswrapper[4779]: I0929 11:21:50.403476 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd_7a6d49df-184b-486e-a7c6-5eb0aecab19f/util/0.log" Sep 29 11:21:50 crc kubenswrapper[4779]: I0929 11:21:50.768192 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd_7a6d49df-184b-486e-a7c6-5eb0aecab19f/pull/0.log" Sep 29 11:21:50 crc kubenswrapper[4779]: I0929 11:21:50.771882 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd_7a6d49df-184b-486e-a7c6-5eb0aecab19f/extract/0.log" Sep 29 11:21:50 crc kubenswrapper[4779]: I0929 11:21:50.783968 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96shphd_7a6d49df-184b-486e-a7c6-5eb0aecab19f/util/0.log" Sep 29 11:21:50 crc kubenswrapper[4779]: I0929 11:21:50.971232 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-q5kll_b1e9d751-a654-48a0-b585-6865e294ce93/registry-server/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.091760 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-jtxkm_c886bf87-12c2-4c35-80de-3bbf58c0df66/marketplace-operator/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.097005 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5rpgs_d65a114a-c403-4e47-9496-eea1f6b50f8a/extract-utilities/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.251275 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5rpgs_d65a114a-c403-4e47-9496-eea1f6b50f8a/extract-utilities/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.291052 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5rpgs_d65a114a-c403-4e47-9496-eea1f6b50f8a/extract-content/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.311854 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5rpgs_d65a114a-c403-4e47-9496-eea1f6b50f8a/extract-content/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.568987 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-zc2kj_d0c0ff81-b037-480d-8ab4-dfe1d6703938/extract-utilities/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.575786 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5rpgs_d65a114a-c403-4e47-9496-eea1f6b50f8a/extract-content/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.594458 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5rpgs_d65a114a-c403-4e47-9496-eea1f6b50f8a/extract-utilities/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.712799 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5rpgs_d65a114a-c403-4e47-9496-eea1f6b50f8a/registry-server/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.888863 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zc2kj_d0c0ff81-b037-480d-8ab4-dfe1d6703938/extract-utilities/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.888949 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zc2kj_d0c0ff81-b037-480d-8ab4-dfe1d6703938/extract-content/0.log" Sep 29 11:21:51 crc kubenswrapper[4779]: I0929 11:21:51.901087 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zc2kj_d0c0ff81-b037-480d-8ab4-dfe1d6703938/extract-content/0.log" Sep 29 11:21:52 crc kubenswrapper[4779]: I0929 11:21:52.064360 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zc2kj_d0c0ff81-b037-480d-8ab4-dfe1d6703938/extract-utilities/0.log" Sep 29 11:21:52 crc kubenswrapper[4779]: I0929 11:21:52.065715 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zc2kj_d0c0ff81-b037-480d-8ab4-dfe1d6703938/extract-content/0.log" Sep 29 11:21:52 crc kubenswrapper[4779]: I0929 11:21:52.288791 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zc2kj_d0c0ff81-b037-480d-8ab4-dfe1d6703938/registry-server/0.log" Sep 29 11:21:59 crc kubenswrapper[4779]: I0929 11:21:59.714607 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:21:59 crc kubenswrapper[4779]: E0929 11:21:59.715473 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:22:04 crc kubenswrapper[4779]: I0929 11:22:04.175415 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-vgkdn_31f19cee-69f8-4016-9025-35a73798bc5f/prometheus-operator/0.log" Sep 29 11:22:04 crc kubenswrapper[4779]: I0929 11:22:04.357777 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-69fc696f78-52x4j_c314497a-d9a7-4364-93cd-924a0b1f2de4/prometheus-operator-admission-webhook/0.log" Sep 29 11:22:04 crc kubenswrapper[4779]: I0929 11:22:04.395274 4779 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-69fc696f78-8fjp6_588bdbef-217b-4bf0-84ba-ae1d0fb8a80b/prometheus-operator-admission-webhook/0.log" Sep 29 11:22:04 crc kubenswrapper[4779]: I0929 11:22:04.576127 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-42c95_d21259d9-d1c5-4d70-815d-045f775f09bc/operator/0.log" Sep 29 11:22:04 crc kubenswrapper[4779]: I0929 11:22:04.635248 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-9s4b8_030213d9-0575-48e7-b472-dd72d07ecbc8/perses-operator/0.log" Sep 29 11:22:13 crc kubenswrapper[4779]: I0929 11:22:13.718604 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:22:13 crc kubenswrapper[4779]: E0929 11:22:13.720278 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:22:27 crc kubenswrapper[4779]: I0929 11:22:27.714215 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:22:27 crc kubenswrapper[4779]: E0929 11:22:27.714938 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:22:42 crc kubenswrapper[4779]: I0929 11:22:42.715258 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:22:42 crc kubenswrapper[4779]: E0929 11:22:42.716133 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:22:55 crc kubenswrapper[4779]: I0929 11:22:55.714834 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:22:55 crc kubenswrapper[4779]: E0929 11:22:55.715736 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:23:06 crc kubenswrapper[4779]: I0929 11:23:06.715656 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:23:06 crc 
kubenswrapper[4779]: E0929 11:23:06.716710 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:23:20 crc kubenswrapper[4779]: I0929 11:23:20.714898 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:23:20 crc kubenswrapper[4779]: E0929 11:23:20.715823 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:23:31 crc kubenswrapper[4779]: I0929 11:23:31.715032 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:23:31 crc kubenswrapper[4779]: E0929 11:23:31.716043 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:23:42 crc kubenswrapper[4779]: I0929 11:23:42.714760 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:23:42 crc kubenswrapper[4779]: E0929 11:23:42.715608 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:23:55 crc kubenswrapper[4779]: I0929 11:23:55.728292 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:23:55 crc kubenswrapper[4779]: E0929 11:23:55.730954 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:24:10 crc kubenswrapper[4779]: I0929 11:24:10.714508 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:24:10 crc kubenswrapper[4779]: E0929 11:24:10.715280 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:24:21 crc kubenswrapper[4779]: I0929 11:24:21.714844 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:24:21 crc kubenswrapper[4779]: E0929 11:24:21.715852 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.346519 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4v5km"] Sep 29 11:24:26 crc kubenswrapper[4779]: E0929 11:24:26.347806 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9fa8c0c-f285-4ab7-8f91-d584a79180ca" containerName="container-00" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.347824 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9fa8c0c-f285-4ab7-8f91-d584a79180ca" containerName="container-00" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.348177 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9fa8c0c-f285-4ab7-8f91-d584a79180ca" containerName="container-00" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.351547 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.368064 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4v5km"] Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.461113 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-utilities\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.461500 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtxcv\" (UniqueName: \"kubernetes.io/projected/2af918f7-7492-4f15-8ba2-ffa39901b2ee-kube-api-access-mtxcv\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.461616 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-catalog-content\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.563559 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-utilities\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.563996 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtxcv\" (UniqueName: \"kubernetes.io/projected/2af918f7-7492-4f15-8ba2-ffa39901b2ee-kube-api-access-mtxcv\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.564179 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-catalog-content\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.564491 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-utilities\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.564665 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-catalog-content\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.594928 4779 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mtxcv\" (UniqueName: \"kubernetes.io/projected/2af918f7-7492-4f15-8ba2-ffa39901b2ee-kube-api-access-mtxcv\") pod \"community-operators-4v5km\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:26 crc kubenswrapper[4779]: I0929 11:24:26.678570 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:27 crc kubenswrapper[4779]: I0929 11:24:27.375339 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4v5km"] Sep 29 11:24:27 crc kubenswrapper[4779]: I0929 11:24:27.652574 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v5km" event={"ID":"2af918f7-7492-4f15-8ba2-ffa39901b2ee","Type":"ContainerStarted","Data":"2fe3b546df592a47e2754c97c74720282accd3ce788c5b9391db617166540f47"} Sep 29 11:24:28 crc kubenswrapper[4779]: I0929 11:24:28.664946 4779 generic.go:334] "Generic (PLEG): container finished" podID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerID="4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5" exitCode=0 Sep 29 11:24:28 crc kubenswrapper[4779]: I0929 11:24:28.664998 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v5km" event={"ID":"2af918f7-7492-4f15-8ba2-ffa39901b2ee","Type":"ContainerDied","Data":"4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5"} Sep 29 11:24:28 crc kubenswrapper[4779]: I0929 11:24:28.667936 4779 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 11:24:30 crc kubenswrapper[4779]: I0929 11:24:30.686123 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v5km" event={"ID":"2af918f7-7492-4f15-8ba2-ffa39901b2ee","Type":"ContainerStarted","Data":"e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb"} Sep 29 11:24:31 crc kubenswrapper[4779]: I0929 11:24:31.700849 4779 generic.go:334] "Generic (PLEG): container finished" podID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerID="e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb" exitCode=0 Sep 29 11:24:31 crc kubenswrapper[4779]: I0929 11:24:31.700942 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v5km" event={"ID":"2af918f7-7492-4f15-8ba2-ffa39901b2ee","Type":"ContainerDied","Data":"e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb"} Sep 29 11:24:33 crc kubenswrapper[4779]: I0929 11:24:33.748389 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v5km" event={"ID":"2af918f7-7492-4f15-8ba2-ffa39901b2ee","Type":"ContainerStarted","Data":"17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e"} Sep 29 11:24:33 crc kubenswrapper[4779]: I0929 11:24:33.768787 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4v5km" podStartSLOduration=3.943032968 podStartE2EDuration="7.768762667s" podCreationTimestamp="2025-09-29 11:24:26 +0000 UTC" firstStartedPulling="2025-09-29 11:24:28.667621634 +0000 UTC m=+6900.648945538" lastFinishedPulling="2025-09-29 11:24:32.493351333 +0000 UTC m=+6904.474675237" observedRunningTime="2025-09-29 11:24:33.763321419 +0000 UTC m=+6905.744645343" watchObservedRunningTime="2025-09-29 
11:24:33.768762667 +0000 UTC m=+6905.750086561" Sep 29 11:24:35 crc kubenswrapper[4779]: I0929 11:24:35.714571 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:24:35 crc kubenswrapper[4779]: E0929 11:24:35.715131 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:24:36 crc kubenswrapper[4779]: I0929 11:24:36.679032 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:36 crc kubenswrapper[4779]: I0929 11:24:36.679373 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:36 crc kubenswrapper[4779]: I0929 11:24:36.735364 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:46 crc kubenswrapper[4779]: I0929 11:24:46.732986 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:46 crc kubenswrapper[4779]: I0929 11:24:46.784394 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4v5km"] Sep 29 11:24:46 crc kubenswrapper[4779]: I0929 11:24:46.895245 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4v5km" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerName="registry-server" containerID="cri-o://17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e" gracePeriod=2 Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.402118 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.500593 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-catalog-content\") pod \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.500705 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-utilities\") pod \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.500856 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtxcv\" (UniqueName: \"kubernetes.io/projected/2af918f7-7492-4f15-8ba2-ffa39901b2ee-kube-api-access-mtxcv\") pod \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\" (UID: \"2af918f7-7492-4f15-8ba2-ffa39901b2ee\") " Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.503794 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-utilities" (OuterVolumeSpecName: "utilities") pod "2af918f7-7492-4f15-8ba2-ffa39901b2ee" (UID: "2af918f7-7492-4f15-8ba2-ffa39901b2ee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.526699 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2af918f7-7492-4f15-8ba2-ffa39901b2ee-kube-api-access-mtxcv" (OuterVolumeSpecName: "kube-api-access-mtxcv") pod "2af918f7-7492-4f15-8ba2-ffa39901b2ee" (UID: "2af918f7-7492-4f15-8ba2-ffa39901b2ee"). InnerVolumeSpecName "kube-api-access-mtxcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.557336 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2af918f7-7492-4f15-8ba2-ffa39901b2ee" (UID: "2af918f7-7492-4f15-8ba2-ffa39901b2ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.602687 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtxcv\" (UniqueName: \"kubernetes.io/projected/2af918f7-7492-4f15-8ba2-ffa39901b2ee-kube-api-access-mtxcv\") on node \"crc\" DevicePath \"\"" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.602732 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.602745 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2af918f7-7492-4f15-8ba2-ffa39901b2ee-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.913112 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4v5km" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.913107 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v5km" event={"ID":"2af918f7-7492-4f15-8ba2-ffa39901b2ee","Type":"ContainerDied","Data":"17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e"} Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.913256 4779 scope.go:117] "RemoveContainer" containerID="17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.913110 4779 generic.go:334] "Generic (PLEG): container finished" podID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerID="17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e" exitCode=0 Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.913368 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v5km" event={"ID":"2af918f7-7492-4f15-8ba2-ffa39901b2ee","Type":"ContainerDied","Data":"2fe3b546df592a47e2754c97c74720282accd3ce788c5b9391db617166540f47"} Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.945542 4779 scope.go:117] "RemoveContainer" containerID="e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb" Sep 29 11:24:47 crc kubenswrapper[4779]: I0929 11:24:47.989004 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4v5km"] Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.001257 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4v5km"] Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.005598 4779 scope.go:117] "RemoveContainer" containerID="4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5" Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.043464 4779 scope.go:117] "RemoveContainer" containerID="17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e" Sep 29 11:24:48 crc kubenswrapper[4779]: E0929 11:24:48.043917 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e\": container with ID starting with 17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e not found: ID does not exist" containerID="17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e" Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.043963 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e"} err="failed to get container status \"17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e\": rpc error: code = NotFound desc = could not find container \"17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e\": container with ID starting with 17ada29f8fef4eee12cf4f9be601b13ad26070757e1ded22cfcf4f3a90eb3e3e not found: ID does not exist" Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.043997 4779 scope.go:117] "RemoveContainer" containerID="e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb" Sep 29 11:24:48 crc kubenswrapper[4779]: E0929 11:24:48.044283 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb\": container with ID 
starting with e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb not found: ID does not exist" containerID="e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb" Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.044313 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb"} err="failed to get container status \"e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb\": rpc error: code = NotFound desc = could not find container \"e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb\": container with ID starting with e4401e244635a29e6ea1eb6f18dee52524a31d0caf8665233e24fdf6bdcdaddb not found: ID does not exist" Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.044333 4779 scope.go:117] "RemoveContainer" containerID="4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5" Sep 29 11:24:48 crc kubenswrapper[4779]: E0929 11:24:48.044692 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5\": container with ID starting with 4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5 not found: ID does not exist" containerID="4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5" Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.044723 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5"} err="failed to get container status \"4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5\": rpc error: code = NotFound desc = could not find container \"4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5\": container with ID starting with 4495eb5c9f0bf5c3f8204e265e1b42f09a50a09d6bf44da2d98c27834dc012c5 not found: ID does not exist" Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.722768 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:24:48 crc kubenswrapper[4779]: E0929 11:24:48.723145 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:24:48 crc kubenswrapper[4779]: I0929 11:24:48.726221 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" path="/var/lib/kubelet/pods/2af918f7-7492-4f15-8ba2-ffa39901b2ee/volumes" Sep 29 11:24:53 crc kubenswrapper[4779]: I0929 11:24:53.974818 4779 generic.go:334] "Generic (PLEG): container finished" podID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerID="aafadc2d00f2e109d652f3c98dcec7dbe9eac3221b35fbdf3101631813d77986" exitCode=0 Sep 29 11:24:53 crc kubenswrapper[4779]: I0929 11:24:53.974890 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pnns5/must-gather-kxppc" event={"ID":"a8ec40cf-67b3-436a-a55b-1f7472e8ad29","Type":"ContainerDied","Data":"aafadc2d00f2e109d652f3c98dcec7dbe9eac3221b35fbdf3101631813d77986"} Sep 29 11:24:53 crc 
kubenswrapper[4779]: I0929 11:24:53.976370 4779 scope.go:117] "RemoveContainer" containerID="aafadc2d00f2e109d652f3c98dcec7dbe9eac3221b35fbdf3101631813d77986" Sep 29 11:24:54 crc kubenswrapper[4779]: I0929 11:24:54.412413 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pnns5_must-gather-kxppc_a8ec40cf-67b3-436a-a55b-1f7472e8ad29/gather/0.log" Sep 29 11:24:59 crc kubenswrapper[4779]: I0929 11:24:59.715136 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:24:59 crc kubenswrapper[4779]: E0929 11:24:59.716015 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:25:02 crc kubenswrapper[4779]: I0929 11:25:02.883135 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pnns5/must-gather-kxppc"] Sep 29 11:25:02 crc kubenswrapper[4779]: I0929 11:25:02.884876 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-pnns5/must-gather-kxppc" podUID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerName="copy" containerID="cri-o://b9d95caf1effffa088ea89ddaf71dd28d40079b8b1b1c856c198b36cefffa9e7" gracePeriod=2 Sep 29 11:25:02 crc kubenswrapper[4779]: I0929 11:25:02.893835 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pnns5/must-gather-kxppc"] Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.067152 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pnns5_must-gather-kxppc_a8ec40cf-67b3-436a-a55b-1f7472e8ad29/copy/0.log" Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.067668 4779 generic.go:334] "Generic (PLEG): container finished" podID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerID="b9d95caf1effffa088ea89ddaf71dd28d40079b8b1b1c856c198b36cefffa9e7" exitCode=143 Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.381664 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pnns5_must-gather-kxppc_a8ec40cf-67b3-436a-a55b-1f7472e8ad29/copy/0.log" Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.382511 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.560233 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-must-gather-output\") pod \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\" (UID: \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\") " Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.561034 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4w6m\" (UniqueName: \"kubernetes.io/projected/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-kube-api-access-n4w6m\") pod \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\" (UID: \"a8ec40cf-67b3-436a-a55b-1f7472e8ad29\") " Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.566528 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-kube-api-access-n4w6m" (OuterVolumeSpecName: "kube-api-access-n4w6m") pod "a8ec40cf-67b3-436a-a55b-1f7472e8ad29" (UID: "a8ec40cf-67b3-436a-a55b-1f7472e8ad29"). InnerVolumeSpecName "kube-api-access-n4w6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.664920 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4w6m\" (UniqueName: \"kubernetes.io/projected/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-kube-api-access-n4w6m\") on node \"crc\" DevicePath \"\"" Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.796339 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a8ec40cf-67b3-436a-a55b-1f7472e8ad29" (UID: "a8ec40cf-67b3-436a-a55b-1f7472e8ad29"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:25:03 crc kubenswrapper[4779]: I0929 11:25:03.872130 4779 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8ec40cf-67b3-436a-a55b-1f7472e8ad29-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 11:25:04 crc kubenswrapper[4779]: I0929 11:25:04.082570 4779 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pnns5_must-gather-kxppc_a8ec40cf-67b3-436a-a55b-1f7472e8ad29/copy/0.log" Sep 29 11:25:04 crc kubenswrapper[4779]: I0929 11:25:04.083314 4779 scope.go:117] "RemoveContainer" containerID="b9d95caf1effffa088ea89ddaf71dd28d40079b8b1b1c856c198b36cefffa9e7" Sep 29 11:25:04 crc kubenswrapper[4779]: I0929 11:25:04.083372 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pnns5/must-gather-kxppc" Sep 29 11:25:04 crc kubenswrapper[4779]: I0929 11:25:04.115234 4779 scope.go:117] "RemoveContainer" containerID="aafadc2d00f2e109d652f3c98dcec7dbe9eac3221b35fbdf3101631813d77986" Sep 29 11:25:04 crc kubenswrapper[4779]: I0929 11:25:04.730491 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" path="/var/lib/kubelet/pods/a8ec40cf-67b3-436a-a55b-1f7472e8ad29/volumes" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.463594 4779 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-82ghw"] Sep 29 11:25:10 crc kubenswrapper[4779]: E0929 11:25:10.464661 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerName="extract-utilities" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.464679 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerName="extract-utilities" Sep 29 11:25:10 crc kubenswrapper[4779]: E0929 11:25:10.464692 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerName="copy" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.464721 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerName="copy" Sep 29 11:25:10 crc kubenswrapper[4779]: E0929 11:25:10.464770 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerName="extract-content" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.464778 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerName="extract-content" Sep 29 11:25:10 crc kubenswrapper[4779]: E0929 11:25:10.464787 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerName="gather" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.464794 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerName="gather" Sep 29 11:25:10 crc kubenswrapper[4779]: E0929 11:25:10.464809 4779 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerName="registry-server" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.464815 4779 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerName="registry-server" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.465133 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="2af918f7-7492-4f15-8ba2-ffa39901b2ee" containerName="registry-server" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.465147 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerName="copy" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.465172 4779 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8ec40cf-67b3-436a-a55b-1f7472e8ad29" containerName="gather" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.470152 4779 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.478209 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-82ghw"] Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.556784 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-catalog-content\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.557153 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbv99\" (UniqueName: \"kubernetes.io/projected/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-kube-api-access-cbv99\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.557392 4779 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-utilities\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.659635 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-utilities\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.660362 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-utilities\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.661176 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-catalog-content\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.661340 4779 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbv99\" (UniqueName: \"kubernetes.io/projected/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-kube-api-access-cbv99\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.661525 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-catalog-content\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.687981 4779 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-cbv99\" (UniqueName: \"kubernetes.io/projected/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-kube-api-access-cbv99\") pod \"redhat-operators-82ghw\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.715584 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:25:10 crc kubenswrapper[4779]: E0929 11:25:10.715874 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:25:10 crc kubenswrapper[4779]: I0929 11:25:10.804050 4779 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:11 crc kubenswrapper[4779]: I0929 11:25:11.283494 4779 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-82ghw"] Sep 29 11:25:12 crc kubenswrapper[4779]: I0929 11:25:12.164946 4779 generic.go:334] "Generic (PLEG): container finished" podID="d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" containerID="a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce" exitCode=0 Sep 29 11:25:12 crc kubenswrapper[4779]: I0929 11:25:12.165015 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82ghw" event={"ID":"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4","Type":"ContainerDied","Data":"a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce"} Sep 29 11:25:12 crc kubenswrapper[4779]: I0929 11:25:12.165589 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82ghw" event={"ID":"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4","Type":"ContainerStarted","Data":"acbb19d82a51acf3f7d5d2db68244f6578fbcc4e0d6f9d3de95b84cb16d6e73b"} Sep 29 11:25:14 crc kubenswrapper[4779]: I0929 11:25:14.198832 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82ghw" event={"ID":"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4","Type":"ContainerStarted","Data":"dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd"} Sep 29 11:25:16 crc kubenswrapper[4779]: I0929 11:25:16.218869 4779 generic.go:334] "Generic (PLEG): container finished" podID="d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" containerID="dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd" exitCode=0 Sep 29 11:25:16 crc kubenswrapper[4779]: I0929 11:25:16.218948 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82ghw" event={"ID":"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4","Type":"ContainerDied","Data":"dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd"} Sep 29 11:25:17 crc kubenswrapper[4779]: I0929 11:25:17.231235 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82ghw" event={"ID":"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4","Type":"ContainerStarted","Data":"428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994"} Sep 29 11:25:17 crc kubenswrapper[4779]: I0929 11:25:17.256643 4779 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-82ghw" podStartSLOduration=2.795907884 podStartE2EDuration="7.256613286s" podCreationTimestamp="2025-09-29 11:25:10 +0000 UTC" firstStartedPulling="2025-09-29 11:25:12.166890514 +0000 UTC m=+6944.148214418" lastFinishedPulling="2025-09-29 11:25:16.627595916 +0000 UTC m=+6948.608919820" observedRunningTime="2025-09-29 11:25:17.253543507 +0000 UTC m=+6949.234867421" watchObservedRunningTime="2025-09-29 11:25:17.256613286 +0000 UTC m=+6949.237937190" Sep 29 11:25:20 crc kubenswrapper[4779]: I0929 11:25:20.804224 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:20 crc kubenswrapper[4779]: I0929 11:25:20.804534 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:21 crc kubenswrapper[4779]: I0929 11:25:21.853382 4779 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-82ghw" podUID="d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" containerName="registry-server" probeResult="failure" output=< Sep 29 11:25:21 crc kubenswrapper[4779]: timeout: failed to connect service ":50051" within 1s Sep 29 11:25:21 crc kubenswrapper[4779]: > Sep 29 11:25:25 crc kubenswrapper[4779]: I0929 11:25:25.714603 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:25:25 crc kubenswrapper[4779]: E0929 11:25:25.715836 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:25:30 crc kubenswrapper[4779]: I0929 11:25:30.866181 4779 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:30 crc kubenswrapper[4779]: I0929 11:25:30.922728 4779 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:31 crc kubenswrapper[4779]: I0929 11:25:31.106171 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-82ghw"] Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.372825 4779 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-82ghw" podUID="d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" containerName="registry-server" containerID="cri-o://428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994" gracePeriod=2 Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.821242 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.891777 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbv99\" (UniqueName: \"kubernetes.io/projected/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-kube-api-access-cbv99\") pod \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.891970 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-catalog-content\") pod \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.895271 4779 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-utilities\") pod \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\" (UID: \"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4\") " Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.897756 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-utilities" (OuterVolumeSpecName: "utilities") pod "d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" (UID: "d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.904138 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-kube-api-access-cbv99" (OuterVolumeSpecName: "kube-api-access-cbv99") pod "d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" (UID: "d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4"). InnerVolumeSpecName "kube-api-access-cbv99". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.967900 4779 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" (UID: "d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.998728 4779 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.998770 4779 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbv99\" (UniqueName: \"kubernetes.io/projected/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-kube-api-access-cbv99\") on node \"crc\" DevicePath \"\"" Sep 29 11:25:32 crc kubenswrapper[4779]: I0929 11:25:32.998781 4779 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.385502 4779 generic.go:334] "Generic (PLEG): container finished" podID="d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" containerID="428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994" exitCode=0 Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.385583 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82ghw" event={"ID":"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4","Type":"ContainerDied","Data":"428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994"} Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.385635 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82ghw" event={"ID":"d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4","Type":"ContainerDied","Data":"acbb19d82a51acf3f7d5d2db68244f6578fbcc4e0d6f9d3de95b84cb16d6e73b"} Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.385661 4779 scope.go:117] "RemoveContainer" containerID="428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.385938 4779 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-82ghw" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.415221 4779 scope.go:117] "RemoveContainer" containerID="dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.431076 4779 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-82ghw"] Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.438371 4779 scope.go:117] "RemoveContainer" containerID="a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.439465 4779 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-82ghw"] Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.491128 4779 scope.go:117] "RemoveContainer" containerID="428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994" Sep 29 11:25:33 crc kubenswrapper[4779]: E0929 11:25:33.492414 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994\": container with ID starting with 428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994 not found: ID does not exist" containerID="428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.492486 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994"} err="failed to get container status \"428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994\": rpc error: code = NotFound desc = could not find container \"428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994\": container with ID starting with 428935634c03d1c69032f9e26915c298c78cab9c511c72d3b39d2a7b61f62994 not found: ID does not exist" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.492530 4779 scope.go:117] "RemoveContainer" containerID="dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd" Sep 29 11:25:33 crc kubenswrapper[4779]: E0929 11:25:33.492937 4779 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd\": container with ID starting with dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd not found: ID does not exist" containerID="dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.493048 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd"} err="failed to get container status \"dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd\": rpc error: code = NotFound desc = could not find container \"dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd\": container with ID starting with dfd7e0de7d96acd8a1576a21e437c02cd2599b996c24ebe91e7fc25d5d978fbd not found: ID does not exist" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.493137 4779 scope.go:117] "RemoveContainer" containerID="a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce" Sep 29 11:25:33 crc kubenswrapper[4779]: E0929 11:25:33.493758 4779 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce\": container with ID starting with a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce not found: ID does not exist" containerID="a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce" Sep 29 11:25:33 crc kubenswrapper[4779]: I0929 11:25:33.493788 4779 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce"} err="failed to get container status \"a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce\": rpc error: code = NotFound desc = could not find container \"a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce\": container with ID starting with a97f1a7f640856e358382dfd592052f2dfb46175af9cee4a974cc6de0be683ce not found: ID does not exist" Sep 29 11:25:34 crc kubenswrapper[4779]: I0929 11:25:34.727013 4779 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4" path="/var/lib/kubelet/pods/d4ad48f7-e68b-4e17-a36b-77ad5f8d1aa4/volumes" Sep 29 11:25:39 crc kubenswrapper[4779]: I0929 11:25:39.714966 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:25:39 crc kubenswrapper[4779]: E0929 11:25:39.716127 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:25:53 crc kubenswrapper[4779]: I0929 11:25:53.715396 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:25:53 crc kubenswrapper[4779]: E0929 11:25:53.716603 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:26:07 crc kubenswrapper[4779]: I0929 11:26:07.715131 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:26:07 crc kubenswrapper[4779]: E0929 11:26:07.716284 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:26:19 crc kubenswrapper[4779]: I0929 11:26:19.714608 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:26:19 crc kubenswrapper[4779]: E0929 11:26:19.715429 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:26:33 crc kubenswrapper[4779]: I0929 11:26:33.714597 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:26:33 crc kubenswrapper[4779]: E0929 11:26:33.715719 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:26:46 crc kubenswrapper[4779]: I0929 11:26:46.715013 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:26:46 crc kubenswrapper[4779]: E0929 11:26:46.716725 4779 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5lnlv_openshift-machine-config-operator(f1a5d3a7-37d9-4a87-864c-e4af7f504a19)\"" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" podUID="f1a5d3a7-37d9-4a87-864c-e4af7f504a19" Sep 29 11:26:52 crc kubenswrapper[4779]: I0929 11:26:52.326953 4779 scope.go:117] "RemoveContainer" containerID="220528a6cb5d0258af78014b1af4861bc26db1cb1839cfce87ae080edfe7bbae" Sep 29 11:26:58 crc kubenswrapper[4779]: I0929 11:26:58.729032 4779 scope.go:117] "RemoveContainer" containerID="e97f4046eb906ccaacbf65683e1f4a4e5b7c1d12fcdf2f114dd137565f224364" Sep 29 11:26:59 crc kubenswrapper[4779]: I0929 11:26:59.268034 4779 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5lnlv" event={"ID":"f1a5d3a7-37d9-4a87-864c-e4af7f504a19","Type":"ContainerStarted","Data":"8de465166ccad2c718cf982507c0e42e05c752e9c3cbb5ec27c7c063fb3579fd"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066466436024465 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066466436017402 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066450202016505 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066450202015455 5ustar corecore